[Binary artifact: tar archive of Zuul CI output (owner core:core) containing var/home/core/zuul-output/logs/kubelet.log.gz, a gzip-compressed kubelet log. The compressed payload is not recoverable as text.]
\3غ8: v  &a% ]%SH)p}5D&V.YH' &U9Vb]3-NkK3)͛o:=cg>2Y>mml!BjZ˫/W.%\LoYOX kl~>EGqn>=:>J~(l?v<)}wh<6=Met;Q{^y/N|/g0tcmpU!阕bIYyɵzד[.d_?hē>Jw˹q,\FQfS:#_NbD3j~yvtW@i6{yd?{4 {Z>)\1|zp 'Fמi~hvz[Z?%fG;?0U`v 1B<'pln!\őxyS{ȓWԪ^9G:Z5Y=L\e"'ǽ~r<9XLr쎇 ju\5g-G}^H'n}>q5Rl??i0w Ocvdvx' ߿~?~x{ o|'2g( MIA~ Z[ k mhȧOWŸ3;K яnROs9XD;}>jGBq +&lb}ʄoowfb&y9-\Xp9viypa.k4/_qXߏ'/jqttkO'әݗF,i|7Fr͕eE{#х(%'ǿՃ`fC^moJx~\-@̎9BN\[vC<+il2Ѓ8dg{(Ƌǃ5QKD4Wf&٪bъ!aeWizOO+a}K-j,ez'=uB|>a ) K ϑNZ63@%kùRNggR%#sgk]}CV8zs{ek 3r3-X/yv~(_bxW ΛplUd:^w>uAfgsDlC6+ }~riri+*)>*J1Rml֢l 'JAK5ximcԬP!AagVC\jhS 'U XS M'fߝ*{ٖM1{/| ŸqQ2IrP5d@>[>&( D(-B<F:k֝ ,B&6b["{aEK"t~뻻Y1t-)ߩ/>] 0i˵yߞjU]]ò|*j(3fSŠD"RYkA ZJMI-+TFTgd`dJBbiQ UI֌sf MuX.|˶og X^L?;ǟӳ\cWESB$ yiѹ#2bZ&P٥a,Tih^,F P2`xBP j檲5`ܯ\q1EkۢNk>[" K.U e - V!Šxm[-AaǜjYF3]SXegPvUQ1j#8ơFwM1nO?la`;ӈwrCV!{y/G֐ UaH%c4̓aYiq =ʝ"PPxt.ZL2I:k (Fq(rO<9>-mb^G˭i* pue-Q^A}uQ`@d')[B^/a\vbK4D.BEx9*<~_z-YE)eM5Vs6DƷFj0TȐ1`,urʍj[|2F1zL SjAlAgКk{c~'c[$c<+qnMw^-hkYŠr3-=Ri:_k Z;kߟ Q|X:KLK-m1f՞OA)]mZ :gņjkH%&w(fZ6>39# +smn2 trbo8j۩~:n{Q}J1KZ$fqܡgBG!UP OL(x8Dۈ9AQ.d:UwrbA@"z*HDO Q يW!tbU X\R# (;Gڂ6 !rQUjr!lTI"H;]Co0rD76A K|; gvƚ|݅՞jz&tA>ހCq=JsT@` fZ,E:N vYeL`n#h"E]؊VO`; |ІZXS*Ѫ}SaZ|-#j&G !qWWo>YL:IFA,ѡ۴Gl}W|ՕmfpzSJ3_QT-%T]t)*dEq2gkAWJщ$tnxIL!%=0*V(AcE1*[KJ0lYjY"^ \ΦZBE=9T`95luQ)bM cWƚ1 V c>@ c!Eׯz =ܡlO lFO!C&J@rԮAɁZwPk8w~Oz͇iEah U H1LX*0~8QS>;?ٖF%<*c&hp2X0R4&D[+ia&^(sz'mímܵd򢷦`,𹁅oxɎlVGX6 镌EXČbEZ`N<%۪qȦ-.Oapu&9Osz BO `g 6 6i[nǰD{gct籫 ǟk9i{tQhHZS<&M̀ ZjX}׌$<[) PT}J(j͓U |Z;d^)x᧢ejfRW@ۻ[VऀXκb5<+-S5_Ҥv&Yp%> פ9jA  pdFRtMdxNv@{V%OlV[5*>gդf9u!]^]J2 F} qymfiqaa*YnPǻY(n77jKH|&%3QwR߆AP{izn19L?OZvn_v[[}N S[Ãe01z,Wk͎􃵍v#f W2 9AiU@;A57ΔٱA:(e`LZHZ2mxAYG=v,#'BUHeFRDXJ\g<(_03GtOFhU8(A; ZDlA3ø)a3ƾ{苝5:j\><'F-..zZ=} < v32=%GWJqbSu%&뫯*ʹ`Gt ^w4], #qKd$.o_A1fp A \C%W \SOKC:\=a)_Ǯ& Wc:p8.)9,+Sz5k hߟMK˦`i~ ߺ`F|-?lzMC^b Vo)NQ'9BO7fLrzD0 + L'nWfoQYg[Ƣc\C.=h< .fdrH8+_XG K4A _.PHx3jg>LLj9+j jn7؏o{n oWKz(K\oGV1_I"]Mp#9b8%ї KSLXҫ Jaჷ#m !㽰4hoJ%Y<%,(N-:ha\j Lެ -zg՘IDxun5ͭHTz{cak2jl~ vd1SKSFݬۭdBVtT7w [BnpQ&fLh&f IO/T=xN)8",/MvNT؀.&RBZWO˺n0DWms4+вeZ5(6~xctrzM;->2s5v[{R zG.ji.l5w;YhSnyIw"~]fwbD۝ؠC([nٕJD->~kw.![_D{+~FNi LkEtʏqAhG&8PCz&#P`TꊃB[L^jʈhA #(H8H荝5MT(VhR>Xj&4 /v4ASlL;~om;}>x2!Ă>4fA:`)GMH tT1#p.FK!-Eca/lk ik cnfP:P0;]Z6$ XH <)*LII DpC@,s$;7v:$[o !|gKSz=MޝuO\KbFh?l^SLa6L08#  e )!DB 3!3 JJp|g>+Г!%:0Im9=a+yƲtcƆ! 
wD$< OR׍u?!p ^9^QO(Qɠ$0Z \ҕ*K}oRXB"Ev?P2SRFa!>ݙI_ē7_RǝqjDbj48CR2X 3Z{ \r6O9(v0؍e1ʑR{ ve0iPUA]]8IfbzmUw }"!P9-ďѧ0|&iU.S)ۨ>O zEҍÔ,|NOuq*.m(,qpiEb ^^^=Z?7EM/hqveOVw5fpy>bk<ԜF0—: y DC-SX=sTxn:f8ẅf(rs9yLO%Ozi+TWZQ\}"Қ-P7e"ĵn6 tmsFR角$$k~Hݯ#ĺU i|(j}r9tbu6o~8deeۖtgn}z ~u9#{r\F׍mc{fjYb PܗT+bwϤi2i2:67k$vu_Љ><#TH3r˭4mRQ>i14őD- [M&cdkP@JɥgT\l⠞X$}{8=GpsG3%(QР"H['(Nwk$c dC2#C4^OVrWrWrokIw%ov=䧟0[5JbSѧuQ7O5rQ~cg-۷|a&q<cKخ9Gt{X~wm$G_!KGuW㰱""EB?m2%kA>DɢH"Y`m3~_u%M(W o5jk|dyJh4^Ч˼'~\8'V7N:o䪷jxn繎<}2>ѧ<AFN]PsÆA}7^߽y{ 󫃷+NrE h  7WՏwx5-^V:wZ}y1;%U xwz8ܻ=h&[J*\ 2nOp'8{D+_.K>)Bz1Yj~&}~GỗklN6^x6:1#1FTH^o&vN6 E+%kW:[=k ,XdcoJ͈ Rg^Ec%U|* zwM;y-+8_;_yyhavrŶ~ٮqYdOGj~up^ߜ&S ^4ht7( {"#ދ/ QǟhzzԽY֐W~|>B_.4S̆M7V );zooi?mu^Q->uRUJ0O6Q\2SQYw1x]akvmgqr'HG"OIsd.Z!+WÙʱ$G+kkv3c[]}ִ.+R-p4z{ac ~oM?lOIYMTeC0=@utVd:p(T$Lr2bcR ֟Re Rs9[S $])ԵxNd`E꒤/; ĹߵMPSHM)+ʓFGP>ʀSR*Ɂ/^1UG',H :U ]M$(.fSHg#맔2f`$Th"`[$ZPXQ;fE֍kZBW˶x6v6Ov=1KKu~Zhy/*s= 7j& ̧ S0iQqH%zGd,6a2JE,Ĭ 8Q`\pd,8)2Z’R A5*Ő'Gd`ωJV{TU%U;fd쎫t͌uPu{/(vrZmvخɗI74p|v8|]!l'JIڊ&='VQK#欂b 5-դ p7Tc'((*ljѲP%scL;Gilvl j{{ 8O ?CeahN8/C6t6Vd0a1} d$P[QgdIYDh8mDINVޙ87ak"+0nMXuLj{Dom8D?l fjB V"xU-X֛őd hE*gB l%L1%G Quvg›Cyjc\{\vG;f֢fL~"IVW]4I&D%Xq>pqk;Li{zY/]#oM}w>OF1-s\8ޏ(~Qmf| (!-i nBYaeAET"G*ԃ[^Ibu\S'Q'i㑬)F_uX.;MZ)TNw[Qh|8_ yI?Qz(ik(KVhJ)C $ֱ O]Q)H)[TB$/&PNJϚl&dWD8ڈ"D@]n}s+Tdu%k٠)QT+a EX/NKd3}H)֫m-u|tn^" ڂ`$ nZJr{DqRmzUo {fPHہ<-:_\ jcޟtW745W2&X0@,>(ٞ NC 1d\HHʇmJvt{owYe@F2ǜsL# خ\jߏq̂6,8}*)f1햓Ĭ\{vHAWgGã:}&gZ\MQ8PCtQD/B|P(gl)&CS"z0JT\4 }-R2ftu/$2x]fDZF3+FkcXQ;(\5Cqi  `Q($WhQuĹ]FmDk>Y٤kLxfv ǚp9{ Ă^{H2eP;TvwhZ-ەUZ0^J/ZJ*0N*.KF¶UzzpV\îwnKBFRz\pu' WwW=1x7i5<\MJWpzZ{A}WKngw_LgST^-յt'8(oЃ_=O&CoL#U#` v=Gl R-ӓaNďRؐP5t[?<.g(QȝTۤ1ژGo̪ {v^NƇyަY,!J}aF[,A} sV-|n$=ã#{z}r՟{:X@WD xgSij-_RT%ߏ콌*Y Khd )*HCM$Rdm eS2ԜMP5ǃnj0Ty 9XkCkrUZtnU) 'h)!b*]*-Ur &\dUw]&t=\=Ap8~_]-#<Ӓ/ /e,?K>Ύ>O$`6&G 8*MSPZ,ꌟu{4Jcņ%\g3 7~1O?-Ͷu?6-,Ew+%1Z B} ɚ`y8RxRdp-:ab.zEcvl)`.bj^'޸'I٥Ihw%է缐@kɏ}\%&lQ&B*fRC/@_7 'Ɲ+%u^σ] :ajձ|g?O)|w͸,⧾|v1˯۪Jx}ӻurz+`(C C>2[avo֓=^\y<}>xה>xy<}>֤{RwLt@p>xy<}Ύv<=%y<}>xiP鑪:cc0+TS}).5l)̌IG[F[f`W5U,rْ&@TTݚ %tݭRF$ݺ:_Y,uՂcd sL,:%XZg2'S U'%E-Yg]{t%db6Q'G~ Kbjn-:LCTAuQ@„btBIE4§4vg^Qie<$, UI2eD'QC PHֆ창u&-ի߿5_3Q8 S$%Ekj8K4@)4/G#8N;tEd*Х |V oӏ Ф&Dؘ]ѤdHE/m` jKVwT+vRo'=p4<߹ j!H*IEC*_22(mY?MnC1&`2#|HN U$%6ՔhbIկ^W;* ;eU@r%8:aXN6Dm|cfoOu6Hb>oxT-r(񉎉hA8"ldR0ژr4&b7a< ,%LĘyMwBCCvLKt.I:3x; CjJs E" ` 6Qٶm xO m(USZsՠNr\j;\d?oH.R .'vݍU:j ->D&=Iۺa*BءQMC_x$aug {>ZR/? \pAxPof:CE/RkѴ66A~n\H~oNB}ѫwk~Uָ}rq_)mM%l^y-ƔuQnȃހ=:PcZrT j,sZbD ϧ_jɫIT{j%M%Q0*âD#N ƒ o#rEjpd!R&R/5eG1h# 1iYwf˧ɇQgWplp[!_ʭv-m[rq]dsoîT-*#h(Jyn{q5 O종= kGkR[E$kOiNcm=Ac,6 ~et` a+G U dMi{,}}&mB6-î8[/dY.BK'PeZKyy^+nq_qՁc`b9H2Gz+heVu60?p듏> 9Djzyå O=cF@1g`E)j$$M5WoI9Nda忽5J\^l1y-$Y{!U Je窞Hs4MVZ8#Q܁q,WԻRꃏeww9A'8ƌ`I': NH1iLIL-W`П1Oɷ* #nѭ dq960*HEMlq(gTBx/, [ROD$D`I= :h\u^LfVHY naj3jX$"DLA5MV22͟b9^AƃTMwd^Pop*}7_Q[J]I'E ܬEr'j^ԧɧ&)BA'7j4܀ F0x4:/G9*,Fnmv>x!4ë߹͞cJL=HuّO6zG֬_ڶvuՇ>H;?6!\a*y܎7W|y:f2EV9*k"roR-;n9~d84>Xj&T~ZfPT温$ uҒ_=_ƍ!Ă>4fA:`+GMH 0bF\,jc5BZ4^>yg%1@ik cnfPwcaw mH1`"{0"ا0; %%% XZYh6iQz{JgͮIoɻUxOo(+ v}Ʒt=x {B7Bd:,d@8`R&FAI).gt3-Yhd=gqKuyar0{V:eűcƆ! 
wD$3$uݸaW2,0>pR HB CEx/ȅ͠(̼Wg׎37}բf](IڇG)E][v:ħ=K]0Dՠ-B˸ϜhcpjDbj48CR2Xf'pɵڜdzCZ("fXfS:` 10)EQLս0 n4Me E`;N`YMnM*x3T Ȣffe 21u{&jQhPt6dMЛ vJFtn^f.m( ,apIb LiW6p_,&j^ڞut5iӛ~( V<5(B12O3h6wV`Vru p6^.qs%l`h@2n#L<'SIe<^݃)T)kmb[/"Ҫ QWeĵz^:@ }(H )پ-%6jNcGu|vÝ4JwJ-!5?ebP,]rv7$1mS6@ L๣x[+@@7>D.j^opN} s{ C[~ b MKzW/:u'ZLdxQ_`<ͯǾH3_l̼ǻzïxtpHfL._nO3~}1WT(7߻N|(P,.gX>%<ςyDBX mLFd:ؓ]\ZSfT{9"=c^Q!RctϢY ռ&Ngm9`Q+jlx>Z<a)ތߢ]|{Bg2UT*}sWϓMr>2 7qٓutG~z *lXVi-xW`oYyMd-5;z7| .ggjj+tnӦ9" PG0l5K`<)-3*uTlt~bA (h/?=}0,$'dDyB\XNd*"mXFHqy*E)ʌ A3UrDיGq#ڟ^nd?J9MBG“UpR*{¹ ukZ_DW]f~n2+Kggu"S$r!o^%f29{Y?h2{¤*޼KǫYx9w4lCa[#x\ E@0+uh)Y! 揢ëUuPE IQ6mjBcK ^.jY^'poTjG.*xMhd%:h7QOPnVvgx^2 {pgntcڨ`7fwp1a_e:W%bVO-?r?3ݘ3unw9c%"EqGUQTvك;nY~B5gyG5gf=P ;ZjDz)*h(Šɠ#؏B{02{B4{d%r&LIX8FҎY |&A5^#:;Cei7=JTn{n Rb˙k*XnW*s;R[A^†rYYl>8K-q0AS&kHR*# @{.ux]E[^G*# FǤ"%g€^TqXE$ ObCƽ Pjy*_X[-!PjUC`J#N9BWҲTaPAIýplyM%ZD0RF#fy&-UThe.Qi$A{;d # 鐳 DT\x:$ЁG)8'9 V²Hg乲$k>ԸEa[8 M2)kfԼJo'\$/I?MsE(|ɛUn!ɞX .C_#vnPfUOzX!Ljw%|Fvxכ/za^S (N[lBH,(˥CRr\fR;X8X_a(w\Bb ™޿ wLQ|7_ ]\iRNVX(HsF5Gi3ZywWߗ 'A|ك~evY9<ηō^. o7-uq˵zΑl<c(z Zaa5$qLoi4 iFi67ˏ!4',󟌟wBsFŎӨ=kM6+gլt:j)`|3:ŃObFp6)1PMv']ks7+|]CU(Te\cgj O1Erؔe'M| H v888?/\|/G_߽w߾D?|3_ LKɺ[LP X z)r}0o|ʭ|n.2},T q?C4'3~sM4ViB] BHGALG+z+lb+ 7}$G c+xMI;EQ<:fWTC\^;H}NZ/bIC$b"xeVuQ/e( aHeۙ5!+gG6 + f$Q -9+R 3]eKgJj Yhuޥ>.IFZ^A=*/ C!fHM%&OjRz\Tkn blݑ7T.𪇭ki@f+@#F۫0M\ 8^rsۇz[mpt;l/ek V͙Oy:msf%wUy2yZbuCP%hş6J;JB%Q{r T*D(`vP SOAsQPu ]n'?Yԕz"Bh(y#Eyҿbo$! dqPXjYT.`dtXq;oDdi&E5R` k( t ކ} cwNZ/y>Ս .9X=Og o". *m$lYHr5X#I\0*m0 ci/vk9z_}:Ԫ2j}χXh1GnoǢGk) hx\`̍1ĘJe&#QHYu2LS2Rk5f,` FMFS6f3g(}Ht߇UH)0aN#a+E( 53`.)lHF0""B Zqjkh-RܤcV`&=)8,m7dXP* Ir#c6s#c> ]dBaS<7,{e%ϥ_(`0)Sb'15U@҄ 0Z%L'9JTC < l^.K ^Cr'@,$& yI,FJGt`ZD"rReFlGl;PP;[8&jw vf9FF s) Gi xm +#88f!3 hG9AQNq >2Ga8*dlʩmD&`<>WifD"v vH; |8)ê-bV;prScz I͢P29pFBb%#1)DXҠO0Irf,W.f֙슋<3.;\\eB^eeȏK{d ՕS ( X^t\<\cW<pG=@ؚlujH8OFQE=nE?PV SZdG'+k{$+stiy<a<Jjy,pjwpݰ B0 VL0\mg / WYvV.LWt \ZHrs]<G_-ir/ F}!~FWhZ6ox7fgpMCew1R#i0t40eX`:Y+ġtRBGéA0Ch4}j=o1~uJ֤MNb<0PH1W7;O"aL:Lg{z;4_QvpjehzյzMv0<:|C=8Fwcq 騞-wJr|3G||74I(<GCxY\'<T#`Əg>OJ~,9X?}&CVQ`6>}Z8ʭ5O)BO]}icWQ |,ĥtAWpQbF\,jc5BZ4ǺC BM70"$#JP:P0;]6#;BM0\M(K`jPMܠU%VW(XW#N|d=? Â{ nI9fጺ㍶I<3L|G>ZC/ y, QEJe-!l%i,ÒjǸ1DG C$$I&՘i% 3U7곍nз Ԣen?rD)z`z[/4B E"_8hcp i5Q "15z PDŽ, 3Z{SPC :t\9b`R9'#])&Ҡ*0!+v`I}4)mPO!DVGXwe~PǨ`.zoð JoF jJ&]kN^V;AjolVJIM+mS|U(OOGqzK WTOn]ߍQGNs:py&o0#>,OCh0)OçZ 2ϜF\uZNaFpTnтuƈc(Ûu*~=2wlRDU_w<ިHmu[E5[* a"uSQݨoٲjR]$U׼N,+yEsqJW̕XB| 0>aV#^vDs3E:oRK4Xqna>} nʔ|&vg_.f yU|3(:;K*,OL$-7ZX(@3EBd 簓&tp2JF'4H"ցkɘY!Hځ^&K=0a{3>ql}UGDQJ:UtK-/ba s܆Ry\;a/ :tV{by}IQo{Cv07ӫH&~?3Fj ^) !nF2뱋$ ^˭U>ZB,E<yJ98y-Xhm 3hQ9b0XJB8PUt(wlKyaq<eюKsF>Eݯ<,ւe,g-]a 7lW(nLS"`$MRLHxʝI s08{i;mkXhvyA/HAM)YGK!zƌƁZ0c<%UL#,8ZI}+@g*mT{ |6#W^ao Ҥb }Oz\au<0[b[{v!t [h=9\ܚ&ٮ>'=WrOpO'٫y^uWixfm>wTTӪAF0ZZ \jb|f$6O֒y'1{7{7;y~sb!/"9|)E .|JպE%7/tU}:`ߵ.TX7 \ K1l׺pqWޅ0\ ȘQ{,mv,472BB%c@%`=^Ҩv!MJ.%M=#Cܦ=pYxo9@"Hd.s.{gkn9\9[7k!U^aII_mi5N糶[4IAMf-,A2*}ӣEݦWF[D%.UcW`R"C.1`@st<"v7+G3lc@ٚz~(ۛ݌H4-xLC{3^T @_YD@ɲtsٟNs\v<A20I^':gbVN3!G5B*2ctFiΕs<1`svWC:nglvmǩ؎A+Øv.Hdxf.XBheǕN&:ɐ`LV53V0=' Ie*DI$"pU$5r6 jSӥY&N\|e 7cap983:ӡ6jO>e$1 t`U5cTO Pe.E̹F͵tdRm95c{Jk5Uu!tޫ+,2^}2=݃fIѵ9aϓًb<-U#ʖ54brvl`6ȇ3rXgm75!)F^X8Q $Jcȓ`3B+kY#Ff]̥u%Eղ^N/vzqmDYdQ'tZIbH\%)'(|DY]ӝ^܇^-C>܃ k1%uُXz\4lMth08SW< u,'_J H}اvo)RUK)S+awJ~%=0ɕK 1q2:A`%Fc(SNrL  e w)q!X`h@zC^JtLBYCQnyŶ2KqMlԻ-/CEKJ Gv9 oow.)3$K!rEwI΢ɭ\͘u(Dҋ pE@$'J'"Iwf#ԛjh)Ηx.Tap9-yuy A.(9@׉hAsC BR p3Agf(bܳ4Qp%汅uc0ϜxiB B[3N!i1 :^ C4=Ҵ:¤:4o\l-xR[ϡ2C6݈! 
ҟ]tFfަiT6ZIOzR NHn0jDdhJmZ%Wd)^]Hߞ.znqRcPz'@NjOMWH.wI?sH3>fq:'`Pqjp6=&!#*0{;X)8H@ 0Kh̎Όdz#7s?:^]ƮV"=7kB''KK.3Rl(lu9oWSH-^m~u](^Q 4wfOKhCT?7w^7^\^]xlA }=SwJ{=Kp4򢮶o14|=ʻz:_ՍXݍ,3[ژZ?Q,h8ybY[%W]JG\FJNJoI(M_NSo{Em#5yViTO46{?~GRG^~U鿾ޞr//h E {\U!೮?ѵmko޵DآkmlW6_yEO[;Xrkޏ^ wi({Wzǣl\U0#D6y؟¬ݦ yJyUtGVK&#<'}Uv#MnZG'#i.#$d$ ;_qE\ސC =xB#=xaZz8ځhwHc4,F GFnd63Ld[Ŝ0hieOg..LJODjB碽2v9 wntìv'm0Zno#ad ?{{5 ?F7{^yZHgd϶-POջVI}w6$jmp,M4h x[Cl}EH+kTQ8|k)~yr"_BIĽsX3w/ L+7qI CFAh`u%e;/*ׁy j˫j|0&۠WU*n5YF|}Aȕ4 RR).TdY"4B68Z:C*\з006Z#gat[ ㉱ū)qxrwkMi}q`FGg{LG2}8MhYѰHO3DAyTK毵Klmsh|˰ezC,z2{욟Ǔc"OQWV*jTҲ@ keEՊ>3㘝7Lr75PㇳIMb4mJ7qh4qK_nÌ5Ov3v0UFLjILڣL"|I?4]7\o-`k{z(^W.&t0z- ҼM{˂;)BӅmNQn+ L u&gRXLXzr??kP#ϟ+e&O~LսF#N?*.'~Z݋ZaGvf2$QVzHXA"Phس&쯳h]BNLxw6{f@nMZG3ȋaQ>>sq9N:|}Qezct 4qcjt9YZ6^g άiLZn6A Fuf nP.)6 xǘ*}"<{$PsมpX7h7tGZnhJnUĴ .kmfD)uDluk)l3ҔWvJc=X"g94!h%"ވ9[puc(u(H($tH)IC\Bw=MNR0-*&,炦$C蒠N j'GcQK2cqzӴ9eمGaJj%`>Sd(d2ȍVT,We ]@I-ELi'Wd tɃ(|1,8L'c$ceǬ nu#KZ Ғ)-I Xl9ۥ#1~ e  U+0p6TJ]"S W!V@rkP]6}wFq[%XxF-MvBT6&5$DL]%q %*#HL(ުP[2o5"F TzDP:eBQVX&uD+qs|L w:Ûa/#mpC16D+ `vz 0oѼ$_d   Kw/,/]5j,jd妷TmrĻc~7p 7]ϝwxk7_}!NZ-w<+朷N䅄<\L0*R* TAVDeNGS7raԓn.5dب`+Rhu:%*zQFJry %뤛+ $I{`]eID-bխǒܶ$-v {.03]roR%fB8SR TyQ|Ŭ$98~Pp\w)/ EA`za F)F^IG!ɹ?e G()єusi=IR:4@T%3) g9JXay<̈e@1[L+gߑO/_"{HVeP > <䂑. lԫier^K+,Z#me[ᣗ2>\}.jQ ]QB%VQ$oo~!Ć20-ST zx;G&]yrݬ'đպnyWqVym3[pu۬yD1*L$nRewt7<-Ceji781 '/NG:yk[ %(({k3_?O2!R.#z: 3/"YPcpRtԓg2ޔ@UB?Y|q<[یǗT Vy{? \|_e!@E<.LDJƢ1ОaUާ[:Zi H ""|l=a Dj'":QN`!ց@5 aS@H\J$АbJk2gJZzvTx˕A ůzx4cb< ul5+)?އ(C6ͫY<~!~6\Aۏ]kp=>VS! zB 4jCm{(.5\֮4o=So>QG ; p׿6Mךo*'y/lNQ?p\x=N F1l'q~XO-a9MQ޺Tppf[y oQNR,RԣdkΔEbulǏmoCkN/~?J#[yKh~n U˴rc.v,B~>{t-;yjXgQ6r٢k9 W4{tL^͖@)v 9Y.%zMs;'6+4E8M,Es*((RLIJ*9K`ȲYóz?{̪ܿ3z`&0!J$cZDyũUZqÅcI+~yX #Q]-XxsI  jDTeVhj$QD8>G:S9h49;} -Uݿܸb+/݈Q:9m!:U/8>Toړ-}QWLs=Z!3o$(׀42mѢQbq_|lP/Q֯n ֛SK"1`p[YҤBMɑ/LȬ2gMT١6OؘAf|N"w>A 0jmfR;Xr/K"CĴ3qV ["ZdѾJO o)mvQZbg~ydE7qGbzG%r7#W\lT:b&:`U b)P=.AV08dϚ㭘4fqm۞{q̧gM,'X3$pȊHNPx=Iڇ 6u8}zl'O4_&buϽXW e5A!0EI)"AKY% L`;eW'T[JкdLx=Ez# jY ^8pޒ*ۅ i_[l`TDQEz2Y#Q` R?e̊?7zk5TG׹:> mws;K-lx\-@q!-5%bO7_j3~:HQofǹvH/toųq9 c *54w6qMCYd}=j^i7tw]:_xQT2$!$kPѸ$B|O[Kuӥu6Ӣ֩#:D;a{3k&'{?j(-"7c7t79W3z׀VZ,^mD:}␷]?V>jZ?[կ+ o&]&fb_I?(ijѷ#p٤->ߢsiX֒K[s{K'˚Q˛񰴙˓#U'ڋu=hҟr^,9%nuZ]Ejn >9:, {rWS{?ЩtC-ωߧ{zo GǷo>gxㇷ{`BVw]"^ªuEAȇp鿿+]5͛+4˷u j+/iv[e[nϷ?N3|~؞xԃ]5~%\_O+ǽ4{5T^ (I'Tx#A{R9_צԏґW#YɑC40;B&-9ŚRSHOnlC^Ӯ6ѼFy|eN!cv % V˴t.¥tGėN'g:dٜXɈ׎hrn;v۹mZuy|Pr? i<a3ƂGLFǥwjH5En3$D ya[6s/ #i*ՙ@뤽hnyy]21(e3zI @o)zsTfo"R!b\˾tjn !]:ogǹHz Ҡ7nX3lc׻mweC?GRe͆pHrHe//\Wuq=Vq rPtP[iU-zJF ޭ6WJQ1UhoMRPK ZN4 YŃ1*ٺ2'$+$,*"BtmnG#^ y-5n擧9I:&<$+|*kO1'(E dȸ$m ZPr,{$ަ=[y c`^= o37 d<+=|X6YA0n:<ҁb,D@rb0@=[-6.7(; j}' r1vP0;,(V,+ A;(YS (=Q u-$8H] %7ѺlǴ3qV7)ۓM2Ә*ra&eky*$+tR RJ! %YC2qBw[ٙ@8_ʣdp'25njb5$# 8F5MfjgܿP`T'ވi)j0 u6O^z>dt-E&><|w99isҥ{SOjs%S{zK)d)*H鰤m&V: oⳅD~$^(B0.x4:+x)2Zt%@j!C%Lh`r`JS$U;~d쎫t͌uPur3}zrVbkv?Xb|>NΟЄx#v5%`= YGZ!6ыӒHD**PnF]*{!+@QiUaS[,)]ax[v؝s?b _R17;j[{ â’5E3 Ne`M)M S<,ƺ" !33Ztk2d)L#}b.&rD^!ts?J ]͏uQwz[Ds 'c:sLXaQ@)jjv6ZU5X֛ţd? huRgT9&dӄ)&$$SA7!PGRF:%a[\ӢW@?O󚲼B=YJġ=Hj ZoBeՓר-QS=s),,Cy|/Sd"Dadq#+ٱޙ8+('|CˀW`=fmS-:B*W9ɛA@958jM ])TB$oz)? e6#dBAG(BD|pz[Th}.1I,!՜QETac!"e ] xE,]t')\{]mj׾0$j6(Ф$Őv#2i)Qz)kl؈)7}%?'m-n ڞ^<+r͡sM-mPs0"ϯv@i|@l&&Z0XmmrMN]9:S-L\)Qg F"T!Y07Ӿ: nC}P#{e_[kWKqknUJ [WA!WU`/^ \W,pUcm5w]աW$Aq8<}~7=~L5&Ds;75hhD\ MUN-A?4?q3A1zKo8e|oy?Vfko}ZT |wQ`Fg P?.X@ LZ4tr[<6SZ>Qs<<ΒՏf P#{Ёز! $|MMQFtvoD҉rvJ/lm [U!%mRYfϐt)Ԕ^HkXAEKޖ}`ڥ3(`||t;cmbm9[y1M);+[#= }٠fu@amS)V6TuN FZ}LU<ZqH *#%6TV)$o$s6dL.Y'a') .2R8WF Bo/_=9n8pS|j3z1<&Rgԏ\ϓ?|HI\#e HH]Fr^-xzY!g5-PUf 41fd+,QvUJR-γf}<>6{ϣg.w6#jc̛PݏO=# _c=d߇?8w k;>%FԐ!) 
߷z9E$ڡDLꪯմd.&=JP*W7WYLot^f2ͣYV%s(o_]yQ먌-j^*y=LF 5o7}fou5+ >x^o@j/ͦMm!W$L.<}>& n66g["b%"@2:79uK>U\2ϫD^F&_ʾF^f&YjUTJVOh`|~~&=-8z<&)oiy7X|m"aR]iRdփWC}%oJo7I7*2&̪8K/>|-Wx)?O;p{a&Zj3bna&K,^G׎=,G$]? {(i w@^s+4{(|1)e` 9:B a{ #z!Uxd!B2""&ZH0<)c":#gK7S)퍿o.kc8-@ޔl/}0˪Ok?\\>}w XИ鴃56"%2 is1X i)Dxr @0 wH X{ghs+5#$ cإeC Kpރ>Ŭ2]())nb΂wDðq]uFp}x$OS~ ;XetkIHJ~8NQ lT?{\DEؖ:\ gmtUbd\ s:lvqUO7Ys7s5Zz4,&0fiZؠǠͣ^$TȍGͳ?7_db<̎M6/2_C-<E _sx(V I4/j4UO6?8ͷ$93A]w +ӇçV_U?WXk9gZ5`R*[Xڋ`[תL5׸C'@'DO0M{pMDm~.ޒ=stbf'4Xos:j޶HWe)t2I*eb]SŝN%c?хw{S\s߼, Gh{G0l5K`0멎N)-3*uę>ݳ!E utt@<|r̎w̪ L萮EeDyB\XNd*"mI51O!E!uz]<˧av5k5lm\swih+9J3.]OC; ^V}`7 :O:@l`=8'On'Onr4ݍJ>1v>^pH(w9u:hô؜!r͵ȱň8p@9=)ʩМXP ˀ=R FX✀)F)ySB}\;|Hv~d/ 7m)y^ Ŷ[7BN挢{ӹ]6|(O+2xuɬ.ZX[̌". 9X{J+ph5Z`(3*h2:ڀmT 5T!1wFVil Xl뀱 csMQnG9`6xl_7rKu7`i,s-.gĶR݇meDZY|,ޑ휱Ta'ZjDz)X8A{F.epN B{ B{OCc/VmD:R~0!S&aJ;j+dr;Zv5c3r4tZd-U%FDjڀۛAob|zu{MI˴\\3rS ("S,QGXV2qZN"!K$b*-<[14Ad%ض]Y:RY@$؆1d^a /*͸Jb,#Fc`ចDŽl`V̺~Q>X1fD<&+iY9PAIýp,yؚkkjA (!e4vL;Ϥ %t`I7鎌Ke"*.@LYVXE HZ`@0,2#a}|88ui{wrͤdͧ:e Im,Ô\|cG1ڇVWt?i(S3Gޔٞ0!872=űF%gj0u:b'XP"MmLSXa@r S7F3)(^gpiz^~UЌN[lBX,(˥CRrL\ mNKhqV櫇bVfbq2F&ew>ecS D9m/ou}::.G9Za#Ц*M6tgP4$~9l-#ˉɓZYU5G:hY`•u3T"U҄.DQ$@Lr]?(^"(ᾬ#DF6q' ?q/HSA*z>\I%v#n"EHje%)FztgC#{Gڞg#(H(eK0|QIAR^{g:U]gkbl;eݼ;ol9@6tYVv&lGKW.Sb(^sڂ8ʎ[uW!@q8hG`v٤;xPLJp ͔`|%+5YKk״Vzr.`|6caLڃ# $g\Kr1] }ِstPd3(|CeaK9ʹM}57bhQ|';K6@I{@Iu^4%w'kR:_PK*h5Q-*mJӨ VeT8\:oYVUUnX^%5Mz\Ms0*Jg~U$ ^bU QV-eHlѧFɅȭ :gV\9r&DEoT^Rh^)g(6MJW3 Lph&nJuvی'3&sr^Lj }!f8mW,w a7e#8[ȳxV 4)^(` HVLP}w\XVp:4e}(KǽQt ԕԨY⃄!1\0*iv &Tk_@|߰&2y/{]F,q_M7 ^7+IXFVk8m+F sHD0s_4?wہ"g\!r -%& `_rXKCUwϺv -:2'"<]%HC"<"1 Բ?`0L+gaUp^5. _>v\8j,IXs;SN9{N+0 .;tXXCQ 0oYxwuoJvta,ɭ>kZ5/{{"- W`VY%c65yvЌ U"غaҨ*~T70?zMܽ:){OGtL7ce,:u~/:R)tAE(PE6𭶊˚jx/̕~o_>V0PN)93Z'Dc^3K>l32)np09sH[G‰uXԽf,?0F4MϽ6Bz?FT5W|8 1ex9Q'7ray[+\ሢ$e+-@R^=ĝ=a>waj^0#J-2FMɳZJ"$.Uހ}Pd&#QHY5:P띱V D䁰"][s;+[kKlʒ~HH6 ej mt>y9[v$Mه4ϡ0E{6\ ! ?$ ymT1j(!cGZA6/jIr*(Sm>[I:Lހ"ԮW{(unlrdmɷOe4>n~r"g9wE_BM[/->9K 'OQy~ƵWbm(̑Bvc^E^<^|:gm 2>z:+_kVROC4VfصJwc#.|F4l%>f<q! ?~r)N/_'/3wo'_X. WyVn!Js[f%O)![>7g}_K0b'/4"00OSR:Go@0_uJ%e˹!Qc7°;`g>{6g3zȀ;l4ixd+Ԗ*{Qf.Ѳ5'zIo,ҕ㈎jzttz1k3Vٹeo&v!C IJ۴%,S gSfht5!D,Um&wM,tpM,X~K[ypqĈe:)̦d5j8``7pu09Ԝ /]M~Hz5_!v~^@,U8oz4-w4v9>m_ra&ǵQ/><\y>^WŽ/+`WkxO?.;}B[mCK⽅vDZ{BZ 7lFp*Ngן澟e<pkeH|APyyZ˘$#nM˧RthϺb;|-$7pF Xlj]ۍDU)Y5XrHFsѸY%lM4vUd*p5joE^lˇ4t׋\z0>끽ck6.׬48 ĽC85н\4,#gb.eEcsF70V 3K0)Gg7օ(0 AXtf]|9$P?Olm{=ctGi\ޗ΍dX `8>gGm!^"7~Zڰr~I}] ov6L6Gz&q'eO;)X8lrӧr_KA,^@6"քŝtl-H6h&Cj/4oc=W:Z%26`e] 2d@䌹HL*MBVY`&5,>,= VhפW(Z5y.߿4C!-/}+.>.])쪉88.juzH h9 &X \2#]#]٤*F15:&s2(5eX6}ɼ_'à ^ ?5ӡ=Xī{Y@cd=#dF])H-~z=2j],A> ` V'2+^<HP8GGm2V㴇TYor `֨8&F[ /})'E[rD 1#kĢMUYlC`]^v3gKR tKmHi^0tU'ӛ)8-7tEtV؍sTl xn8rԓ+ț7Y5riַ1 3\֟ M3O y9zy>pc6\{5'#9rXz]wy8Odoq{Gdn]ynu{ '^JGqhv޷^35o͡ێ{ZA1Yۓ׏yJ_~js^ZM{մC[ 92 T6>{HhJ=Vϴop#Qn53a+"l TkY)HEFk)k2&ugbg {6PlQܦޢ)s֣km/Gђ?oG˺ k,>qWll\ ' >j!GPl5a'ڍ=%.;|՜S3 T%;d:51kbY?ݞbR2> z+ҫ yI; sr/8>WN<)xԂ@X~I;ABLh.mņ˘x^x&y+SϘyZ扦*hQJGo`M8 pePjlecy\ɣ18>9s}*lb%_x)D.@ٻ6d e7)Q} 0]3[`ׂЧ5E2<(AW=.HQ-9骮_uQPR"ↂADSAﹾ_LUT(8dehQRJ#[O`9B<3phu υg.Sr)K IK(R<~QKjT6+vGwqi_^eKλ꩷h0ƚ6&fJ$aO"%Od>f;ɲXg[t6[uGisNOW`v|l^_5e+a`7# }@CoREɴjF{I/n]Av<&N_u9Mh4)ߔvj!63C| $x-5*|v`nb+V|B)C9\ȼN8CrV3eT p䴳FFC4R hښ"uI{ST)B(L<999xd-R ?-lݕgk5X${dDr(,[˼W 6NEET2q(x(Kq(k+eBY*`5aa 9ǭPDdDMN)j*Ĉ&Ha 4cUYHL(YAwx@u2ȄQ!CV![J)#f|⻒7Kz;R[nt;^˵7e@5L!0ws d^;'IF#,U,Wd V_YxaY<ƶr7ʂlɃLA>KڀF+JrDQr9@!k9p'/? 
8G;̪Y7JHt\*+"7A+z4AHʊ }ß[ӧښ6G)n1*I>r\si' p QÙtOFƭāLZ hgb}IeHERRprR0/aP?J:#(i&d4tl}GUmYkg56v$!N/pa)bžp#0GOS&?ĪGԵhܼ?MX' Ћm`ZLq[j C]19x$G4t L pU%o, q5r$7\im@pVQy#C,:u!^Tc[ E^ߚj)ttt8;^h*mFpȶ[&{h3>Fd+uR]{uhMk6Qr>;k._UVd_?Lrs9W#p4~>[N _!]#)9#w ú196ᨖO8y.>.ډ^?x8CGmkԮg^:HXhs9.r,6F nwjJ\\t.nW?>GۗW/}{J>=g`=n $+j[~|!chSjhk`hK>u3eG6vr˭/gֻ>-&"~V*?i"kO9|ō*(!RtKB%< 1G׵{a$ǽc$Iy 4 YPTh"l(B $Ik&Z׫]4x~DD%sA1M5AXk4夘$%JA0舷3"g:;RlMl#J,wp#ύ;+ԪӸ@xti<]xdX18hXjv]g/-rrrWvvS+|4xbQ aPS Fǟ7}0j#On[aTyjg5e GT!/e>7k,}^^<;Z={޸^[RE[KBCp.#?I^N{kNVˆ6-򹎪_jQs`?lh_T{׷7d8]#:&7|q\%{0.A@4$u<vf۳? ga}?mf7R Ao6RCzzENB(=5"v_tơ`owTOd]fkx.dYFFZS 1PKwQO$с>ip]"NNyjZޠ7Og2~BXSx e=g GUu8ɵ!]~n90}vL*z)R*v=1h[nˆLYX8K.xQHo6Zý=|"8Ph*յrjo?TL8&R Zsٞ){U!{V1' 0?YwSB@%2 (# it4XEhL8ubN$b%;ؗ( ed[a[LY 8z?u@}M|‚מ;Kל^ Dfr87:xʢO,g^D\ٰP0O{ >6Dc=+3(0{[:6/3ҧv._P މLgbeT֪R "D *KiDBR*EFQ4NScp@y2TDy &g F8blwޥg߻bec޽q)aZQF"c,6Y*dTQ6@g)(F 4p (Yr]He(I@1.րXS$h_QQyӛZ'd1bϞ{8\ԇs @W_EpMjn.{OۥZoI;.7V:yk&tB >s G$BP.-0dZsD B PD`ǂAI%Redd,FzXNW)& me!+, ^>zeٷkz1M:Փhг?};4"%@QI*"*AJvxtTb #s j.5OӐ U 8bK%A1TB6`d1ta]ۍ ]luf{`lN0']ϐ0/xD% KQYC-6xDZIHeӂ"d ) <*5!/YdKTc6B IdGhK'+/FzyX 2 ƝǶ|//- upJ8iAIQDu`\`YF79 $( Xr5D%U(H<Ƚ\| 3Rc[y( -S>[kz1z(nr<ޏOKG˗Lk:<^P6 i(z5﹀BRGPB=[N@?|zViI!{$7GTMRaX'$&9 g( /[ߑ$,P9 *>PBZA=N#`AX Ѡ/v “~*jсKA{"]MJh+rc $$}rEn {N١"AKpQf(e3ɠ!`rg"- xZW{}TZJ;Ou>uirTΕ0hU6n*('SR5_j/Ǵm[ׄGN$@ <[N=TS}R[՞v&q~G5gz8_!f`rl@؇]KD"#~"Eɤ$K#ƀmQ3LUuezUwZeݼoh;Uђ=-M_4AMSѼę`&\LH.4ѠFϢꃢlOmB; l)Qg ZjDDBZ`lzn#-去s;n^HKlT<:^6tfESNtbVb$fأ#tƸGGyYW*>e&o4DlbY*=D!{2HT|Ҽ B-sT3^:')k.vpN.bJ HbZ*@YZMqi IFs(iWYcsu֝!u+O螩OƁ73.j7`s/%m^{#裍YT}&uV⳩UsU{uj/jku^O(4^aI=y8)wITׯ 1l zGfV9=rX5۩O<#:aTza,j,8ol)['<m TR&Ԓ7%G#+eF kF:ƿ!}Ppx(2,-kpL#߇hۥrK榺(~cPO|d>`K,ˆXZeqIA.%8^E]53m.fI 1zR=袕|u (Լ.*%tA9/\HZ#t'*P ɂLal* ّ^IIP∬3&p=zYwg^}yj,p}@99J jo:QJ9Hul]UKRPN:R4 XУ/&Ot.Q,`6Fg@A)=jI=OzBgTG JHR1.oU(C  9\t$(N njg 3W6UWGp9oGwYK@QT0!.F4Q֘|'Yԝ 'J1^ۺᾝM5EMDg:Ve(T2Ti.!&mjW:,"9$*eŸV zS Sݩ U$a;7AfuaMa=Ҵ4Tshe $!ۑo0yiLǔ>6,CG4o^u/`!%'h耇DlwhUsui9W7 >ǃq^Û7 [/K.!`m`\wQm8oں$MPh;tvLKO49Gś|>`3Zt0\6Orkm~n98|o]wCfۀw/1Wx?֧=mc{wAh48S k --k 8̯ii1ryX uu~w>NL&?/?7> ӸYc%q='Is: cj,lAd?M^w&*32V jR3  hWd~[[|d|5戴 %9Z%$Ӥ]lęƜrDFyZ)P J$$e~-NR"zeM f猌u,/Gi]?xQӊq" %3hEղy(+kHbJ'|дynЇp6:4lxHi,kj[_֬<'>1`g G g_JOu4vLc ͘(m9 )230xɅ5Wlm_YZ=:(fOґ7Ez||P瘄3ZYUwga)>a%yZϵ7oIXׅ<2F)Cȏ&-{<*Y0uӓݐV-sz_6P {;#$ޣo>Mh5jkt`d)/q V?yBU= KmpA* ] d^ (8 0 |C(y˶\ľ1T{qxk2ĐumsI42nzm.\Z,`/2WGtKfetP,Q(!.|ay^g VA_8!)2t=k~;Z2exw=ScoQOu \rɸ9>R]I(V̎|S 7.h#S!Iɒ/ 6F!J2[g;ήKi_ vL+O g|cbJ UW*E*ARH9Z hTb!bgִTֺPk?K0U˹D}2/`rRl JBFRfNrwL&I ;A'ܷ"hL|{Zy"^LQZa+tQ\R$d!a[m$^w"qꗦ+R~uD6JF/TG_#"80HPc_o| g*=[ҞZ ՕrͧcPF`8a\BcP"ssru BV!/((5&T\QQh{vܝ7&N'ps3,ŬeJi.o>yPEwJ}FX$YNFUOp]NRQ**II@P+o'{^hZ- oy^\AW|-&&bK}Xu<BQj0Y x6NAG:p=w{QD P͵C ,YUAK%:fHC$P v["rE'=Q(!bb+J!xt ;n4{(W4l!^.2YԐP$QxTR%{ %HmffuN5bJWM%Q|"]ie)SPI&B$hhpk̝.uuvEyLҴcJ>Nų39gUTp4s[\ٴ^{} aUwyOȋk0M>O_-v:fJ< KVlpыʑw$"3T2[,  4Tbj6dp(ޔDE$:؝uv'F1jw&bUo{{#g}pâƊ’V{-AYR@BlhcCNKdPwmIr̀q%9lpX_p/`ѯle~QHT N6{f)bX J$a"s>f sa<[RxYcg8}-5"B}D#FW M8ZJ#xQ+I2buh} Dan65F9InFKI%5$1ikMglDS\4s^g-+5.qō;ZU2N) 5Qja#]Ru C 3D}wֲ]㡪wC[?>v2MWz_wXCڭlg?'Q1-ϝ+5%i7&`0HVO^-gGjԣ;ި$x_I;hA:mǒ`YHJVTm*AeAD/PmtY`*X!rcJ0N *) MgsEh~j5T(MsSsA5ZsZD@k5D bV_BS:B$-2i&,?yPAy F0F995 $ˌy ?[PHhsR.Y/T5 Fg)v :~6=ߺl(ې`$` ]bix W%i͂sA>!}Gn.Z>oi;P Z؛"t>) g™LΘp5>x,9/՞d,;t!I.ap{Uؒ'VMyY= HD9<e iv>pjgG}|&ͼhrU>RVEb.=:::RIa5 tqchv:g2zoZyBEypg3q2Ë_?do3]5bϳJ5%۔\`MKc`?΍pa %L ??w2:M?O⇇OB 1kF"avS' Xd C6l cGU`.I4 CzP+L/{f?Ղ d9*eZ(`pŽ7JFMbd G1Iӏ]֩f;I,*I^ F,Ӂ9ar $aI4NK7Ry^ 'cXt@=@DRVoGqҐtgtȮΧ*;ߢ.m T$EZ%"mhWd""DAV0ZUSc :񧚅woq,]Sf[˯lDI=Y$$&GJ6Qr{q28F>H7eWi2>w'rʻ_o3݊ 
j4}ք9g{?o9Cߟ~ylߏmf|cJ~L1_(ϭNg}_mY6ɟ>((/"Ozt1z?d5&/o˟Ow-`\̢u}~]/Ώ*_.~`G3aR:q<̂[w:\~PйĥNYVTǣEҿ ;Ҋp*y Y655tV5f1DW<:`}Vػ'-O|>[=jHoE/]x* VRk[%DkuocnTLJipR.)> (sp0 vDA=Խ nr9ȺG?{6?? GkXbJ7G#Z}ھ3;,;zy9dKZᑳDB{Ŷ'G"ښR iQoOg\_ *[jtBӔURZ =G],+aǹ/H M贐X24p#Tesg)E_R<83yz\' )M_2M*s_^:#:fC$ڭnjMg }Y\RkӫnZ)+7 e]WEy!Gφ*Z} l~sq*-=ͰY褿ʈ6חn6jCsoBƈ |M1*@蕴"r2A׀NwlO@vAYE0\rIgi)vᮚkFB*6=SS}e^ R^o˾hK/K s7PV 0^ 51,GkT\bJAVx!'Wt!SAEZ5H͹2Ud[I([ei .ӂ˴}|f5HJۜ97cn00xmVzͫJҰ^|~~Kz 8%ZZJ"DDgyo#D>3Hܠe\IhȱOͯŽ֯IxVҐ~7iͽ]߶Up C#A=p2+( 7 ԪX9CΊ7E@ pփE}Q@G.X8G )}AImbk_e3K3E]MoKE^~?= 'M=8͙-'K>zy$)>Ox4nlN9Ouy r5 3h6wVee& ds1*FLY(!b2cT3(]{ /uJ.%ElS9#&.24́YpVzUzTզe 5+ztEttۥ控-TMOmM[ TZS{;LI-vCW]O1*eD涓8I)mEr{|aF&I@&pVG+Ӆd1;KpcojtV[SQb3*oW?tx 2Y?]r0KT nSQF 2ph<==Xz"`Q9hck xl@霰rLd!* #O-1I2bg bE8=rx^FD- 8or= "F͔dĂT8X`齾m?{g㶍-b;<ᛁ-v4AAITƍǞڞ4٢#ϋ33JĖ"ɮoכ[C41tTFѡ{v~UDj/B\nQ>seVfa1-c[ꐛoXbrq!wo/Gŝ$eXFxuzVzg{ﯪ ϭ*ޮP4jʾ{]*WW u|U ,^SW{ EQaIQYS|hB~r.\U7űyG)&T? 8r{E<_L/.0/osf5Ka&&[#fٲ֘߶Nł,uFCZ=^}&WonCWq.A_osŷDCYhxdH7x8ˉzEK>{&[.̜zgPw?7ttcN{䔮 l}GF' fm@;M+abI=d,&dL^.q9PFg!1э;zsB:=( * JYzyf$P^D=K#7h[22n E 5tGgxyףhzY--JWH}&RZtYnch eD&r#:Fz\}֮{kbih(_T7@iۅwU9|awʫnq#-XTv&y;i>S(0{M22NnanaƖ.hr"lBeV)KL贖ԶQJzB3<Zك1{qs'}q|9Um5.g]n?aؔ2(U8WhrYyQʛ,- y4jat,x=>@r<,BV\2VJM]@E8p >5v3Xdf,)tuB=ʄt$+5Zd*bǾ<\WLPJ{%+,MEWL#mQNPWFY믚3*fRx  2r|b:TPW(Ua]yG^o_y22Ѱ #W Ti1,D}+Q6)_u =\]4wSޞ޹|iT7rF%Q}^qQ׻vhyذ7~w+!/^/~AIxz=}AũBJRe9Jˁ gh}|[u_/AS׳QI .F 8`ο;@JZսFJw?=9z{4F=+.~XŻnTuFU;TM+٪T xtʋj$Tc\@7՘wTs>IHW&+ 1k}TQ NGW^tEʉdtE:]1-)]7|tevz3z<~TD`'ı'^6Õ긺jF8jF[4 te:]ߺjJ PZ{uj*!M3d4MIxVk)4}:/qeQe{o&E6^%d\$/')nnto=\=)Fd:_gY?甾<6h1bv1.7핆 W>]]PV׸1/{ _l 䙖e,2>*QϝAG/1x A^`Tx?(1jcM tW z,w}32G98 #LlN&"khm}TM߇,rhtNY.#:>3ymh(3CrY<].˻Z E u XJLqRb3qmb3FBO9 銁>?.#Sjv]1FAJ)KHWq]2+EmSJuETX K)b2+mg)}׺:E]%}('+ƕ:]1-^WLٶ;]=B+S;HgqOEWL[dJ NPWN_%+5&]1(NPWAaJCP;dtŴs dxF;fjOmj؝fChf6b ]z3Pe|PP0mjdKܞ.K3mDL)EgԤ=؂MFWT*bcE6tv:A]) 銀їm+]*bZڮ+t]u#Ժ"`PqU2bZmSnuE~iRj]DѶ_WLdԕ17q*'j)ا7 Z`_ߛ VXsTMuy}FBJ |ipHF7՘UT;5ޢOHW j:&+U֧+}-+4{xrViHiܞ]:kLEWLZŔmEՓ*SWTtŴڶ]WLi;]=] 6q56Gw WuՌXjFi[6vu7AH؈hVz {CN{)nLF'#NIc-j)L*;6Irk9fЇFTߕT6j軜MqFl6n]&qot -;rMp -C DZΣKi$bաtŸRZv]1e&\tz])/4 銀H dZWLo]1NW'+)؄tOtiu[WLi|ԕNztƮ0]")؝NGWV)]Y!JuŸSkfm :t$rB+ 銀QӺb\+R(5tC+R`FP IEWLk[+:HW~ǬڊoTةc yvlDk|]NWf=6&̷nJt=`J W>m iNFӄJELZiԲ jZ*qOEWLZ+]uqیX(tuBu 銀u t:Dd[WDt:A]i)=bJb2"\]*"Zv]nꝮDWU>!]KhpijRNW'+ 銀q%+խbJ׵NQW_ƄtE^dtŸ֤+Ķ)e7~-Xt:ky2ȴδ]WDvz.bb 4/{l]5Gn]5G~2ؐHORvJu<>YB鍆uL'P@t]Ij$޻_^N-yc4sD'eU a޾۷u߆ܛqJ%Q6GK\"-g;)snwnU_qWe]vsu[\ՔZܽejz#UNU_kaqW]b?oV,[tHE /eK=lg!!Wj{\sK,,|[OɠI?p1YȅPctef|J3iB+@(wȍȬY\TrD(ۤچ,+d^(Y*:,(L=ܶ Z*e{Ke@KQP\zTX,|Md(E6)ȭ܀搴6#P %9eQx[* e ^gflK |z~~9&նTZE&b!K ʨ%E#`BS"PC(LXJiJ31KOFe,"昋|g!c k S 3zͯrO,$ KZL YnHrP(&YP2HȲ #{q.#e4*BQΖ/m@kxG炲_ K. *$b{ h/<)0Iκ̵K,鳠2~Z''hC4ͭ*+bVEC,@m# I}_L:fz~D=1ȿڤnm7: YlQ᪪]k]VѐJJ-At@r=ʜ%x?V]R"vLMZ%9įZ)|0 mttvNVCJ Vj/ "$8dms)T.WPf+X'餞5/]FT,CD ɞ5)0sS`w"R V@fY)dWH! Q S( ٥fّ#Dȗ*m`5.0`-OXUAEE'{'0O4v/uh;v2Qh?k(QLnB!VˮRWHme±&6V:D\KYـ`64K0+3\iu Qx0[ 둔YlXkYsBE(kGU((|hNaROm`q]b ~Nʴt[ ҀRS>!]AАV6p.F9G!HTP&׀T߄L 𿒡2UWHPcYTB2 V^X szeY r3Jր1ukP V#AYP6{iFn3%!2֜An9+hM}0h.Bl coSL3H ̚` UR r ֑tA@GJ LEwf+%RI9n+XT5ofY#<;"JPA擁eB+mՕHދYUDI)be+% 1r 9 VDlZ"5ȲU"ZJ( er1! aUh#ǻ=sA ԙy& _ݢŌUDZ1&"9ih>(ؼ($0D!N&SrL0UhYsJod xW ޫ]-^u mz`-$ >:%@uP< *}t*&{H\!iZTUFB1L: !'`Gef:#.3(Z R|$ LjyU,C Y0vt<@= KH`Ge YLZ[]x $nǮB@8Ϫd!T? y E*(@6T HVE㟓??Wˋ |{lG. O&!z*4ʘ} ]{ #BK|u)}nob:EjC$*KtPK|̡cLGuu $$5(},C)9kD*ZuIBv 䁀:赈 )-fm` &$˽e;VE̓pD"(Y3=@"(ά$1SZ(ͮɀ!JP CEdU5\J> ʰ"  De#@ A1.'J s nOB:kϢ;i$A( =`-yÖ Vk7 XW6b]MA53"Ϻ|AW F-LtB)* jf,(*Fb1;L =yT,+~x$mDVc-S{N낙ٮE5kփ*M3| R5gҼLFALPha3BBvZuZ"ϻz*DEKnj-z5zC-*mP zx;XAVT@9 Z6i/zBL M F:`GrStE8 8%mCɵ+F(ЭtF< \T"nm\4*w^. 
b!C1 PttQPdpN۩ %;)jI-, mT'3twE#BPޙ#B?1 &ozF/a/n^.WwH4ɢDM L:@?sqbmϤII?K䙑Wf #9 Gfsɯ~'QQz,:Y[v@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@b'; e!HN f'= rt@@@GIN v@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@b':F 848V3h @^hf'; N v@b'; N v@b'; N v@b'; N v@b'; N v@b'; N vh@^m@N 6b'Uz'5Н@)vH #; N v@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@xku?_'/i)z=\_ݥzw(˛ L VG2.iT-f&rƸQqIqK }60]ub"Fe:B DW<~"ABW@ks0Ɖ?v7vj7vB]RX1(w+tC/Xy> m_hA6'q=[|hiqiҡbžY}&wWyy#R)}jDRӟnC~i04Mp*wBk;t&1M!M++㯺R"CW*!(tEhїJ[Xq`+BCRaaAB4tvb:BNi+)1 ]ZutE(e:Br_ DWaa hNWRc+bxDWWPW K %`s@?H:XܼDgEDY9Y|nGgͩX[ARworjq29)-tѩ;fŸ/.+Qn(w x9}F]~@e8y0psP~0 `$ȋMB~_.o.忬ow}h2o+G 9yuO?qr;ȅܮZ}IzˆU 5h6K(:MeP]_9]?ڍN\]4 jW?(J.yz R87tT+%-M:Wx4NVޙMkLfD{rMg2\ E#Gj8vیCj2JjG(ՂWj00tZזКkK(]d:Bކ8]pnV!HhLWRzuq~Z}{zCW<4'ځէLt2II3L3׏:)ɴC|yfu~nX:]ּ|{TY5%-޶͉Q_$ :&zy]w/MXblݛ5,_a<\^݌]+O&K GiuЮ>lR-}-O/ (oS]37J 3#]f;SO8egT-$z1ޣxn{W*/ Yh{< BvwX%ghSѢN(TBPɶHOu|QK'0)Ll&7c:O&E7TDMeFxd_2ϙz{՟N5AE!ZZ { \-eiqK 1.-St = ]\F+BkܡtJG8qaFtQ)#+$QCWMǻ=|uE(`:B1<^d>|"auNQjutE(d:BBzi+juEp0 FyKzUU `?ΦcBvB ;6fO* 3=]3w+tC/>TKdXg2JTlm-~ &^CwENh NӄR;#iT#J;nZ}tE(4?"!CW+m> >"PtuteT DW8 hNW#+@ta p8kWRktut崷DW8KQџ\ #]y#I]8 ]ɖukCrfᐬہhl/^BS^Mo);F3N\CaD¿m.U[%#w7+o@WǮyWn.f~8ÇJk[?b#H]y;ޖP-I'|=˟MɋʢyS0YYIl ӲZez9SUӆ'Kv/ۦv܁p= U8fb7z7U͠}|6wuA,7ńQv;﹓h (82$0.4ĈU6*q4FQmD.Q̋ynެt;pU^*Q(UI%8WjMeD.4v 3x%LZMB-Mђ(VYj"K?6kxcU_;F>]8N!3SjjQgd0@x0}^[C?r vuy9qGJRIr'fe썖DAU佃I}FaXVJttE~zјTFDƂpB` NHj-׏$X4 ^ {0 ~3qJ h.^s8Bp .}<-i"BB SIԁhɕ&&7`HA0NE|JQٯiyv;˫Sk2Fθ_rajRplb6'8O\j\jcҦi^\>]T?4)e`${ϗKoo]s MDwn!jj$.7U6Wcj mO؃,>of]-]w4Β͍cZgT릶J d3KdoFI0Ymt)-r+Bu͹H U?6MrzWȎAutWɟ^g/ywF9{/޽~3_pR8@YU^}꿿U۶fUs#6[=-mN#hZ|Lsb]om|ʭw//U C||rxyrqޭ Y-^\Y]bg (c´ooSea"5U2*İ\l[}3q]R s,S_o}5}}&d#GHHbР * @ ΗTmE"]Z51N !m0Sye-={m撍7oeO)Snr-[weX/|38>Z! U>|ASM ?ᰕ 9鴲Sxr'Q"~xb@2[ `MrmF2--X/H::C5B)[-(iGacZXu- À Q^wā b8TM:]6}t^= k.{K-ޢ,$)kGSwsJ\и_-$ iF7h^-[:׾b]HX~%dPbJ&Se+4V2zlPbϝ{̞5u ؤTB GpjĀXZ-EaNe< >\hkaY-"` I2FdˠKF$uTaI=*>dPOhSv?ݲ>nfqG-Oѐb.PXbPGo[1 B2*Z s졫#+rRën (a8Z{(zVT|A1|y6?B6NbmZỷgMCߌ͏Oi_Ci_!E=RzKgSc^b,N mn#W#kV¬s(+ƅqzrrxիkt9C-Af6@5hJi %.݆~pG3)hJ)#n*ohJ0cgzϾ I sUi2Kϙ+%L72!>r0)x}߻e+G:8K&Dm$!%'4Lb^&"%pzܨАHi) 1͙Brc.!1S$ k9{P'?z=L„2q?9cWs[_י/M$A=y_D& $wZ A@w@tpVwZ;ikz_rRQdk߭ o =uOdq-d\Ni+]la+UEs '4Y)ڗf)" oq6My3/ҧn39ͼͼ|D@d1Im;"j(4,%oR"Uبb zAyo@8$,Q"u;˝^iT˰5rSe !x問z5D`ʡG%@>)#K  tDPEo(UAA=E@ ]-cIf\rcښ2"Ą5rT8g4#0*}oF9dUTԼ\OM(!UlfqG'[SR6Ռ7_ߞj-6jƨD htHo1dX}D$BP/X#:YiIQbL#]^dP F"EѤ$!UI֌sfl []u!kYң.|T]8(mlGPa+'֗UTcȭe>e>@ڻh(-яO(]h2s-u ~pj<4dC 5LY`S,l.a<ꅪDCr8(/>-til߮oj&ۅwa|aL}YOS΂yէ?C0^~7pʓJށ-RIi GH;)vn5ì%huK#)iI0F|ra\~}~= >BeQ&9T*R)ޖl#05be(o) -6sK*{ LJ&## osV3Բǹnqx9-yK1@NKfA3-E=x4#<-uoeز#-["N% JyT& U83E@Yު6df^ Ӗk녥!R"15*v'F("#m-Ug_ psw`?ך:eM*4+4 cњ$% 55I9Abp3o'q| aF'eqT;oe T,WzGݟ]ƌ 6K&ŕ] |)NPNk (c$c#hw9PǾ7Ylm͏_ :.a?vI8=n,wYc>A^>'?<pz_k6/ԟO^{|U& f/}|UG.c}j?jx\jgԋ>`+I5^j݈i$tuՙ9oh:y-fGct=^k<~Pk[6R۸k'ATZgL]]oG+zUsJJk&X_Py * 1Qy&R cr+a\81:FF$`IrSukU3o^Iq=dUܧn'Vw,݊tirͲ[ڹ939s`2/g8KLG$F9P7Zhc΋eum{HHd ٧D QƠhgb*ۼlY(bvP^ d֖+ TP.Y&ъ).m/tYK`{ڐ4'6 r2 [SDVheN滳`: dL(h&C> ѓ+V J3W#9Ɔl@aY 'nk:5 qRZf [XFPUH"Emrƺ$]YVd{S&Mu1DNr+AZiD$! 
µYAH0QO=nI5BSY;[ ^ܰ.2 i!ͪ<򧗟ZB=Yz3MBƳݷ*3('mC&@酇jI|fٗjn׍ [ռ#Pw \0jI'Bu.uq0 w" almOge}H9+bjQߦZAM/>_Fז|n޽Ű/x8ҧn׵HV#\%߇i7Pf}s]h$6=l{LHKHzdhۇ1CSWe$i 6Ht#R(U1GNǬN|; c@Ygxr tF(Y(1Q%Z&g)E_JH<83iz\MzFnmY En}ysG養>0t`ŕk(%I |J]A旓nZ{oq\Hzt󑎿EyoʚCw>^E}:5kts^3Qjh24x}>d*FUk D{ { )YN tǎd %>SY9IQ1,RpmU]2JѼ~ ;5LqX.x6f_ً#ZAjMN.1!# !f^+y$ST;k͖/_Aw*85 0{, .30Z1^P7"b/QWͨHԻپfD(]jgaZ^^]jR 6>Q9Ioa& %<9&$b^UNÅӉ\KQhSSFkt(Cc1N̕DƵDk >N& {0~?s}]|_`Axs$/-oۺ/{ZJWzq4[d/ ҅b:VqĐMm"B pEq}St9LQP1Υٴ>瘘ʑC4%řnMϷۥ,tsBlJ~KYS+OJzv$ Ѳ: >z<X o-k^^cN9OJɍk|I26 }Ϸ /'/3!˘Q{H#Fe(U1F2A=C/DڣlUxiRr)i:e[)zG.e"Hdw]2 /'jJxr4~]x/~)+i UdzSjs| o^vdeSY˲6c.ɼR&\WȭJmg3qmޓ/F_F7ۧ9f[f;!lhd\O>vɗԇ=f^}=L-n^>k~<}ꎉĭۜ#yX-poWC-ۑ|51O韫󯿙?Duϭ͟7 \?U]]|\*x\&zD/lPh x=(qr#?SǮ|f/ԏLl2 T"DTht!NEƀp)Xn^pNzB8!n2ݔNVt}wb,m08mGWI_txPqBxc L&`Q(:2&8B2ȍ6*E8E(k-LhP'L#Wޠe L$v6CY6"TVOݡHgy\lO+'xiug$'Bk:\S^ ]%]pL)KoxoxEG-k<^$EX0=qoT&L%7A*fN2 FdQ*&I `X =0rxOJg+ٚdJ;aCYiM> Hr{PֻТCg5CC.PT۳(> Y /0ENAWns9D:hl0\Qs6F 0IN2FaPz3f>՞ݢ{Ǐw>0f/{T)dTхTnBW?<%(\ٷQhY*gkL$7%Ŝ ]Hj4+Fh۲$#쮆.KJt`8(Id݈˲?pܲvFy?i%OG:=+-ܼtzW a<^l0Ŗ:=v|;ߟoh0;}|H2|;륛8i Zԕ ^?.P.JZۏh'}֠}V q47*fEm8Okl;#WGii~ɛJX[Y*Ǝ.e 7/gL`K i9݈m 6s0h6,C:lw|Wn'dK*37"fg^<A>d;}H9MxI9Pxd/nǣ/2_m1)8EWM0;ňőEX4{3JW `2\/f{9"-3pVi#7h<=n&1q2[,evSq۞@){ 9Eэ>zsh 5uz(x"q>t\prsB!Y3Zie-|}J0}vb!x}J*`YBjLd#&H;my2RF̳,HgR"7zu^Y!74tW:;Jg`Yʖ#x`ȭON{UJBd,-Rx=xW@+p$m /R%i~6ݡEBlbOЯ>n~>!m yvڨIQ2HyȞ$)WޗΊt2P77 W[P'\#:9C6 :>qm6'ǍB Hr`WgoYja)v;{VgkO(W3y(_PPXx1 B/A(>EJoA(ɕ[-? ~tP]d9 ]6H$iu=^P~}A;nPHPPHU }f5=(N=8e$(qr.L,jZ9RNh2(+1N&. 8 be I(4Ir#(+{3NucMOȽ]ڕTk_dRھ?WUkzuxݛOBv9{ܙ]Ѧs3>%_'#++r:<'K9teWn#]x0 ǒa驪Tu}UdƤ$HKI+LDhpXr[e%]YW}]`,ԄĆmӳ*{3iM"% Ic$ϕǞqߖiYmLa[O3KTuͫ`RH))bk@RqsE`KGoٚXKӭ7Y.J}H+V:ut(u)K}5&/EOZ.bES> X{88޳9yZ]ΏC*ޛLV+AK.m=2ow7D.k d 7olOydv{H;lWOQIԁ3Ǽnu 'fa"{hSY'QfW GϴzȟřoNO- #d_a}[s{92M8mJ|sTjH7vH1qERuK>6v-6+ӛzsM7۟.NO;a?:z݋o_~~?o{F~_yYq$3pR, ;HK$|/}d迾3C -㭆[ mxȧ^N-7#oSiq{cv?/poyj)2g`~ _b74O+˂ $-oqĒ*oBܫ6\l>OT5}}1,I?lt ~6ӐUNTE&,(8jęt YPA)IlTuxFѝ +6׶<.r)T!cdX 4VW**JYష*XϊΞ=At*n0SȎ2~ggg;-p  &ʐEa;۩L~,qYС#NfupjjTLrWWM~n>}?=0_|7; )(a^ȉՕwUæM0p:8;gKPl6@5hn ꣗hTAtvq%@ieH>o.w @ N TN|EmTEk[΁.թ WmU!kU j`zOgGG|獈Kj+<..>|ܹy}Ć bc9 眴)b -v1*9}m%zd$.P%WVos>BY:kh<}Q×<;=ӻ_] `LyG tU5bEav6Ķt&%%VT:SXOC\mh֬lD cPR,֟uZtQ󠰑J 8[ +})9=F6fj Q4n`89VeoS3xq̿5\ bi MH:E 6cjjbձ[8kqPA-ȷE9SA>֮Kh &TP4  (B4F͂Z޶9L`i2a֌m O腹'~snC,w}S)ɓ ̲o[SS_4|b FaҖk[$R<$F-)W>;LŴ;VJ$*%zTjPj5F64+!CQ=%qb+GC\k+vR54ih897k`p0θ.4B=ox! nxsl|O.}8؟|*&K NBX@"JsG.ZHhB-.eLC3B6&8JNhLgBb_ TJkȹYcǣidbn;EkQkZ-ZE|Ɔ’فdU^u(֩iP}X"[b! 
2(VtBkrQəb89sYđE#f}ȦƦ?a`F5FO$@l8"?b0nfj&X%V,X6ųӐP\,ՑLhj31X\qpJbI$1N[D+eV:ⒻE;^Q/zZV+#H~Вa L-Tl(Z~PVX!Q/>^PqW}ևw*{'_#oUxy0E;'~p[ .au06ˠ;01t1 W8K4:_Жw  ޑEU*Sy"h@_<9Mq飯JLtɇyKNYKaVV'Ψ5Eod89_#;>:Og[ZW~UH9K * pt-&YLe@"j\ 2p]b> lQMrPxBx~ 㒋.3favb فUUHPo}sI5jmG:,qLkHc4 FnU,Ya!ToJ%2k  O>&wm{Sk%VP@9pK )Z-2SRA fZ,w R)Dmʢf"/WݥV2נA;܁/M'rbVa@s8'?b:@Ǝ p p nOh*܀dAv+Rֲ kmsɸ[lWT*V|=6smknb)6_嵳6RH!MZ@#\Y7l :y Kַ2(Kd6rպ 3gdXy\[rnk7:LJ>JkN V6U(%lڶ!;,(,!,[cdE1א*r& eR(fTU( g b, )N%"r0ٶ9_fh~3cs$:=X՞]kYd6@ x̝¡-mfV8a2^s$BGj:aQ1#Ye[j$߃5ybrn5WR X{'K#c]S:4T3b?+|v7o\GtTHضl_LMź sIG@sA³gvg?ʑaTˡ^å:EGjOg~yoE-*zXjWG%]j/'"=eNPFĒDr֯wsC_`Lm@[h_Yb (Iҏkƴ$O_V]Ew8Y zݰvϵ7qq0Y|oy>]l5ENdtP{2ŏOm NEiM\{3w\3nIm`D/&K`_{xcnv.B`j `ewa] t蒠b?Zp,8~l ͂>Y1:O!"Cٖ9׾xOeU7ha k bĔa+ؐUrl\m2*=3jĦ)C-Zh,;m|k gܜ~I8~+1KDFEpԞR$ut-'Cs';m"2B>޺LrEn !]\ GB`|q Y׆[zt<4!*C2]Ceցc(R+CBE]P|,Ɛ)赦h1bh-g* ]|s0rnXCt6 a=W|RH=y(`ZhwrCI(~V^ j}jgWն0Pֺud W-FLr6IUr$M"[@Lw% lE+їU!8pXlP50+-\4ckFT̅+p)jW.عX@_-gsK9/@zF=z{m WPrb5* @^'k 5a4H+!UUB:"ۗKE} =%䅂M9$?Xm1 y*e\O6QT3 #ƘC QlMSUn5EQ5er >WqՖbօmi*BQ:j>Qufϫc[D0 cOWqp'7P`yÍxvy3;BK: 6 \ |^7˹ea<8]Y/;G/-|> 9ͥ86|u.Zh7P9Z>jMB̖Hʖ)Y~/)02|-X6"E%( 'A `w*޲yU9EVӉy.p%W %25$2gt:%"80s &R0cb)Dd=\q [ ز4{}Z,3_G׏%y*kTgJ֩^ϗK06B02 >%Imq9,xDeq}St9LzpJIJW^ GLĂ1)9Ú+k XnM= m +f)v1S%D`1_/3LGi>GV饊(k8a.x!GB K" Mfp2e2lM! kW@,NjdXx :uY'Tu2_ZMj2?eaeYIp>u 7z2]}VʄM)<&(xDhw׹z~b>eWJ-Fкx-[s<uK͢Ch%*Wknw ?yyܠJa6ouu85+;{>#wI~PypKsMKd3, %5 ruA Rl)W((87:!-Gw"8;Oѱ|t,n:x/?utrqhvl wPȬQ4}4ZLdtw?~};]8ʝS~;OwR|3A w^?-#aQY6DvC,D,>a2uߗB *TR6`#WWai}^u|ܝUmJx ¼P\,?(1 DJ)MXB袼Q15t(oq0\Qd:vT$ΓGkX6av !ua6?&55;Q|EQ4 x>(UjEh"#immί~L߃L#R8\&2Q/?w%y&9PP0Gn:ށtmmE*WYn5(A3h'){|Ldl^熩.vuT,Dލ{uzf/ OFMFZI#q12HB ̤ĩ0`''B 8y{aZqϏ,j^pnϬ ʥ^%ũEts6KFT̐8Lb=֌[#XX=zytqq3J5ݟnmmlqcww5 AJʾoC[GU.t;R_e;(ʅ^|䮃i恉#P)8a$"`jD>/f<Œ1i" )J}BP6Q}8x-5(Ɠ>{ك x.ŜܷYLfJmm> {*s08?g 2$YƂA!y.IP^'GZM}Q &c_oHN;kd4Hc,!V[\$.iOu(EʂR%mܚ8kV ڌ ݵW_pWY6ŲN`][/dYj&P@XbљY\JR60Kˏl` L@aM v3UNAZٱUN6UXUאH+U^ \S/ΰ' ~azF,\=IZsͫ&eŁqp%W}`WnЈ"3wӃ\.eTxǗw_][>p^(  .`Vf &@C^0v!@n#,ViWb r0i &Cp;WY\i)WYJJz:ASL:WY`vU+p5 dgWWKe ּ;*+LW*K{pRҳS+I07Cp vբ+p¿WYJz:ARI%c0 mpg,-=7YN4h1858 \hx2΋ `B)dvV$YR,eaN͜ e<&EU+<C_&(As(Hg8 G^kjE4[jmfW@sFKιh1R!3䞹O拡7;3͝s45;$W&as[#!\Ŧ) c܁q>C}ešh ÃY2avMnFaܬnIy{g~|SB0pZf=zhOfpy+ LqjsyAeLv\5]!j~oim}Hv)(Cq2k6Tִb<-O|]70W_ޔgL2M |0yd`Tɩ-Cb0Q7o1%!\7$G?oϖKbP^gLW;ߣ_`%jƛW,k_T"鵊4-\QUE`)z()?UdXXE"gj06rf+4+;9Kk HIp =EW֜۝FNe|Cmz<8$Y";a?-;ec>6?s;R5g^. U_󛭜m:IH [Sn02DŔϧVn͚;o!nn>4Y, 3"KFpFlzcV 8fD~$x!پUj&цy¥\1i ޼Zw@ XvfW}>6d^V=r8)ź^:9q ).ktɵJO׮eʆaW;.7-%9 WQlKh;t[ PF{>V\=KyZ[K33tny$XLjA͂o-bŰj r-c1O԰6EYUk?g4yZـy{Uǁn8xkj  FRwS'ϝ5Y ԫ-^?묑66) o#IB;Rpʸh+nr/[MlalR{aFEUłZ}߳ݭ6qA{a]MPjJ}J w:ϵ-[&>k[%!?bVՓc\nV?*~;v-xqrwzf O* .]*v}भ-MUh7W\"ZFC;D_y ;b >C˨0o_XP-XGfu6FN ueU\1|CxAR,ԉYk$}gcb(Gb'h\VP*rr ȑ<"J%?Jp>eUBʼn!]1\`Bg9X*UOW>W+Έ krDUKUBFOW >9쌆yj/,o~K_\ZL[fX:˽Ҙ~w9TwGc\6$oopϯ;>wXnhnCzHlft|g& ,,N#8G"0H =J{ɋ\ۅ.1ᛳEO 7Pg;tSzecB MWjȧyJ߲ U8Wyx=Ĥ;iϗ\zY3"s-HcD(ޟߦUɋjj>oa"7i\G˜[żKM?ލ~ɛ/yݰZ̋ Ol߃o"bM:٠}v&5BAţ7nc-5\"iQgabP d|!!DñJd.It {">\un_sRQ\EQ0!h]B1jv1,<5^#ϖ4ZpPq珊c5e68 ^w1 O9-u_`,>}d-YaN/o Xyrn[y I0@W'u+zb8qC嶪""(>=Z_[1,K%)L:uf>sGbg%:‚,$AS&x4kHR*# F`*?MGߚ.x.:RpTD2'TEwZI%hD0AP4@xR@p?잉~H*ZY7M!E!0K%#: ^I8 A#cp/\`'O55m4 ʅaFϐ2 0`QkgREӪ'u2i$Ĝ]E*mt}DT\x:$VXE HcOra V²H`/+(8 l=F.Y|~9¬q7df?Ib2ВЙ~=N@wpV3lv+PtS8f~~]NSAe\q3;,Yu.fѰ[{Sc@C"J{1u(¥ߜ _%P.:mqPI,I15\sgɍAuyh|Ӷt>ldJ)(Yyխ~ri~tPΤ_ނ*iin86yɹU&lM2DkbavK7ާo^m#8 _^՝ U5Ug[tVV> %?A^Lpпwtwٛ<8V7k7rUʙ*k-\GR<&xZtAw>YjeY UF :w?N?߾1Q_߫o| 3ߠ&pjpծ~ xU붪US66[= Rjz摒zKg)T J?dLz7> n~?[̚ 23? 
5ݬQiBT.}u- GVfy EY~C)ږ)Oo[􏖭|$>gGz R:s%!x(+, #}`ÜR{~?l%9;?ǚY.:Ǹ]Էmj5`|)^b r"<Ú#õB*ciM5"L=ŷɹO~3&Kqq6V`oMso59&}Vv*X^HtSBҶPvF  F+S875κOW]!םLNulґ2Nߤ79eMe/CR)"lNi_0Z2bD; HME4ߋ7 G4bݡ%h˜pR-LRv_^?Qra%IGN3q4 :V|?ia:@6Tl+p{>Bه.A9Nܛu~ {fmҶa& oEy{V"M.Tљ8t |Xm k手tS4dO~FlJa^iGjd~sVk/;^s K *ҍv1wT٠GEhJӾ/| eaMd_P:GYm໷UC_Ϳ o+a/>ƹU8ts&KUyoD)ٶ,-S]0[:j[0yAuWY8-9]kq&5=ڗ9Vӻg-ݽғ4 1h&Ҡt4m 3 +$T΂#"ᷠ?RRxN{Nvhm{l7㡠H0!X4Dԑa.tx{#Q (PLr,YydcV`h ~Lw0N`:n6(o @9pxxXv^7\o.۲֯GS %@t")K5oIy[QޙW݉ZS t+k2NdВ;Yb<{ۓD ͓o.}xܩɍ kݠW!"݂cc(Z3'qΨ̹3!z?|E>yp{W-C,ۡ#7ɷc#D K*b1ߪ(C0!B*Caj3jX 81hFs+%e58&=HcQ֊}Qf>(f1*0aN#a+E( 53`.)l*ֈR&d{V˰]M&buW/^{/)Sb'15U` @[%L'9JT.b xAvmYhd 3 d %6NF%^F@.Dj[p.桰vkq(MN}`,g(ì~g sh24S-5 WU>\H☁ !fE;$p r ǐNA=u4e>l z>,Bd"ƃے]̈Ĉ'F|PL;|8)U[Ĭv>z), WǴ~q1פ:ҒE?_/~q㎖0*IԬF?GG(R $D%~q~q2cikav.쾑W2FnŦ>c zxLqSFzܢn~Sf,p5iPR=W?`&RQ6rt&= ԃ*|L"W!)Xy|F4*B`/ %0+%\ lAk]  "\iV"mۙ a@x4q8O8?=ɿ&:ph%0AFme*h5Hy A5-"3xC~ LhH E_qlǯB)8 E)7͘ٿ}>%o9R$wػO0CbijYl~ܷv51=H2> 5e2PhkU>ح" ֒ [b4Y$&dc~}}ʡ>Y~ ~vqC<.-~UJfߏHۙpnN/ۤ_ֽ0ҩE 2%$T t~OaajCm*Y>ĩE 6@LlT4!dZ e ԜO- Nx;tNX9TfENQΒ]͙$IJgO9V"Rcl;Dd3GpK1J$6Y B(Kj46h';C 0KP+⥚ͤ;O%(q6]ķD%7MWS(#VNG zX4 eH:`(T4]^M{4Xwr3cM39~boЈ#/O1}01pA ,D?J2w7S#;55@Vh]R=\s\xib Atgs,ˠCz'   e~eV'kԿ_pK=aǷvjJ668hOEVTsttrqg1^҆gxHTGIC{ Y ϐo $Dfq gIHPJO$id (@փ:.#)xo ,uWg{- L2闋!hcM!S/4%HmصiW/oX GqiiO( 8C=A`̮=;_~;?1v,CPSyFiBR{ΰg7cv,jJS1ڪDVVJoy}@@H&u"a~ޑ)d}j҈Clm˜b -S; ;]E6& tڇ5 9Il%TWXmf V5XFQrGi2< w\vfR81fPBRK*}fLߐ!I\H|}V1M :pUEϋ5TL5F6rvjTkR#O+FgK;[?̖z~IQt%IK={i&Z22G}ϵ*lc{[G7 j'o{lRr ٗl*!`QG ࠵j'yҡtm1dX( G'ð& &+R |D8pqv8E~1Fͱtbj_Fۄlı &jf-xXodHUerǂ&\kNbgkoGDe~YAb*7wGdyˡ6ټz1TMڐ2W2U")u0C11y܃aJC1GӐZG:G5uzɰdc߇O a.6SIZˉ->毃Q~vcT@R Ki3Jvq.AU}K)^S=^!P=:q9y# d!>k>OiYT?ӏzJFR(O߭7yOGl5ꄟW?_/}zz9{v_+湽Вhlg'EΚx@/fEws/>K~}G_DBkj-+zmPZz?. 7q\,Q&/Rl©/) ;ӋNE_+׮]ܥy+I˛yB\uY(>& \x|rګ fN^`xlXTTK 1r `| 2 I <f #* 2$ЄYmΤ,Us-Um- Rm^Bmob;{ȐI2aT 3Z=3~ K+ ^.|ey`{4t?vC -dj 1S͌%Eg5g%=ګTr $L dE6u6 9>"8.xqj#d5V\fxMu~_\ y?|cֱ)їvr >RANQ_wz0*os=宀w7H-gYj9'ĖpTXߪ"[C̆yXiN`UAux|C(Ó0zVt >ȄQVH\TiF o25TOǤ7O{*}\>CI,`a`@q (ypMƃ5sED+j˒kYlٔc>Y١[P:)ꭉ֍KqTywP[S]0s1Jַk>y aTj`b bGu51cme@eHe `Q%r)"crI:q7lfEo`d89)= /cRkпL)%TZLQYҒ0:r4qs寋ux/t6uɳy/ښ3Go}3֚_m¾H\0ҷQx/kerh^)d.KUⳊ_u~J^=߯n޼γ _2ƻ\PV[_vm凋_^tWܬֻ{ugFvM7o}aW6;>yXxU]-_՚R<<-s㜟ֳV397SnN@$]Ҥ@tqBiNѥ3 G$ [5-CcPӀLmH(8%BN$?o,~čWʷOOwtH|X3926fD ֑i9qinE2(Mu} F Nj6o2Y2ta9usIXe5fΪʍOPh$ bBrn KbCL`[=Nklv[1p`%ٝH^ȑNEL)}nIin}C(]uKoX6 $M"5 HLLRj !&6QpтT"m!F'WUHa.OjY)k)aOc[7:[T3:\*հOFgz8_!X]}89='1̵,q)ډ俟!)QJ9C ]_T}umt0iUJu$Y*G+4eŜSӧ'j4;s[)7oS.xN.ޏ<>(IHL>yoGK-2np@oû?2)?~il%C\u>屟z4>NgtUBһsZ@6`#gyi_oy~܅&ϯY(RJ$tړ?2[o? 5dJ⯖o-jߠQ~>ˣb>N&|[u7 4A:aytijSץ|_/ټ8O%0tr )zP/D>6*zŰʥg,Ӳ;˚s0pVS`mt&񗅪..|`x 5h J@oפ lLu=.oqmcőBt9"#FF^#G8؇dQ[*Br!d2/E+-V4Xb"z/#RZK'ٻ-+j]qZaR}66"|쮓uevZ'"(4 F#WW`,G ';Z\ %w6.ơk?&-}۾iV1U-mS.*S℧!NBK!'5ϻ&|J&kv|a :6x XP<=%==2ApL6`hblI"l1>ʡ27.¹[2Dl3eŜo_1xOY":v"YqNWŕv_(`NW~{6|~{bsu/qy> \OJ 2WxsvZ]T+p5xe\qKi}ѻ4"'L1 shP7O^Q9_|G6R҃=Tꍐ[/hWNR8}3jj66:=]we`@;%5[pN>c_Z_, _*y(mz[T%_ztq<ʲdLÕWo|)Y!dKS`o2*D˸,kmYe7Q k@7:` Q.U̎AH4lƃ=|džop{ɫڴ^k \],`E4M6RijzneV?2=gscVku3NN+a>]7m'gd Ȭ "5DDE&{x(;{Yj/*}dJv,Ȃ^#sKk\UqBYZLUҸ\@s=x|?ZWu싹҂R\Dse쑹bڟU\}1WUZcnN%+\LQvՖ-EG|U}rzMzL\:PovXt2X`Q 7(6}~`߬FemO7JW2[/;={mkk\j$pz:9s˻K.7͞W? 
ywc~_}G3;?]*$(ҵէ/\+ `WJ|`;w~[),/yurt#'yswS:P[C:洔gEIebo߶U :&/因Tr祆>/vWY'ZUr?XȮ 3#?&@A5F o(޸JoJm~hG}q)|RJ&DΗbE^{E˶3a  8D0F*C<$ wH[tZDAXT(g䗮s:īVO[2ˮҭ_Љ?iΊ#V^ے7j@g5 Ԩ(?o?Y/2i%J5`WY`fwڔld|6캅ě!A4 4 tHB_HesޟM`9R HZuްgJXU *[ݲi)pjSA{%(8c5kTBIɮYpntٰirz<_jٞjeg#4$,N/;5Mtu'ϱ7ar6"1`!ʒU4g9ɛA:s2‘8H[caCoRR)@4QB`MY$/,ɋ;ِ֕:4?"M$x0 T<8fBL"X&V(B.vD++4K.C.FX8 Qb(cPJԍ@r-2IG;_RnWh꫺AcHѳJRDAXZ@1:X kDik .9'"QtHǦ9~}E~Yڋ2\8kXC[-p"0=Xw0*HV7d$ Z' ڞm߸wV03HHZEAR/Fz!ʖ"#jL.qnMo+Qmبх O;J1RT9F@!H FKAJ6w & % !s*ɡ'Xu6`!L b4ѥPv s_kh+ kW2d򞲋K^ @g] ܠ,,=1!d}TEk^cf82 ^J ac`h]cW.2V\Оu Q+2)FZ*vރs6%/5J,0Q $!;S '+0A:2ORP1LA͵W.|[jjwz#C-:&ɚ &;[H>@USyJ;7=:`U dx4>Msc]8x6.kA[MZ:Y> 8vơwx pl2:3aŹc،k!EΉAQzɦšBD_H$# %iE&I6)D4F")C"3;4Y\ӷiXe~džpG'6g$E'a=w1*Uݧ٭Zk&t}rjIr4ڠLJ6LY5h#4 `W@a}, J [2eJ*ʜ(;Y \\NF/5PI:;EAhm*@b r H )S?H"2!dIXDElglq1 Ėyzs-*98>`4\V )ԱQW8l-9o 1GSpJH)t/ {BPLH1i :Ubt\pOчeX@YCu$H$;,*%Ů:n%>G]2oU;V0e㕋pTO|>Ԅ){g{/E]Ylk\>iBG?߬J+N/rlڻϽ<%M&D_X^XKg\N/S {]1Ж+P:ye-UP&J9Jh6OX7[=_=_ALu쥜< #%#*Req]UoT_b~ݮf#.xuK Z_RQi)@)E4RS (Nb4/y!gePE3H`cTŠX0gֆ 6Y:gC=kgf,T>6-9ED*Ah*< tI!DM}ETDh* t__ S]ɆD!bcI%Ȑv>.Q6-((eZA8;OV;X@g);E%VD@Hf4fQZщ8˳ϓu&FdoMGAKKrP3aBSAR?{۶d O3ݏꗁ`\|cu#KIv ߷^[0Hbl6KͪSͪ8ѼL pqywVnEe:gマS]O691a&л(P2b:W_!2ZF48z(g 0V͡XG1 :TaP?"4O`#lR7MI?cH|yBBrpF4lC<><5Y՟a%QCf9kdmlHu.)xJ¾yv踧Hv(GX *U}P7U8oں(Wˠ"w ĵw&{u'i}'%[59*8k0:QCm {'=kwo p߸k_Ii PȭQf&sh Y]vL LYn+n&"F\Wcʹ} n=!r6o~hӚY.[e!BTxTdDˤ-;r VY}Ju*'FP{xˇ;1D(Joy 0 RU@y6V?.wHkV$#"[$rw{k|XMG B"<[w7xn$x"As-1z*E0Yaļp)KxxƓyJ{ys%^'0!:%b~΃d^s4*0كw| z8xVLƹKZE:-BPD QPRb $ M![}1]X~އ+lv[Q8d0IE#, 6BK%x6io8 zT̛"1I`0.Zn|\>DrФ%SDm(5&>Tt|sB7;‘!D_dn G̙*5bv*\d MoFRWwQ rh}<㋋`vVm钋ބpJ6Wy\`8[*?mU#ۺ|D?.t.нmZ~~|zp9= YV6 6Qu燾ΤO~}nu5]'|\?xSɝ;/~~/ Onh0# 5za/kϟ.^?eNlk\Q3i! VFҘ?2!j„:h͆ruz'YT \^ZLMg:ns; C|Q(Hɓ/M mN.ۜ7}^`jI Ɨxa}W5iv6,9ll_M_[ dM/_M[1l93;Ga vAe,^\0I+(XA\wZ`S [UQ; 򩲯B¡{^xiuU{f0s;[**{Q%nZ8V3d4_TAM",z1ߣ-Uo&jXRmc$&1n T6ՌWDPv wv."8?GS.OVv4p~ 2ƏUv MTfi޼>mɺ6u $i0 ^iˑ^ۇS&Fxwwsrkyæ?1,!Lk*w|]MtZg}r]l8ݺ];~ܙ܂1E?r43%] 1Z'+ki QF:rUw2<seX˿~g[ldV_i&ȩOO^Gm:w&53|"tw/Hԭ2pcl46s ;|)kgzyZmVΰMx3jSe"͚Wi6]|]A(h/(m!qo8M,i%]P. 
1I 0\$6RHpɜzwK//PFD6en(z0& c t"yif24*%q^!5' n2v,c'ƅZLbL'N$=`&-`x`50fTc?$-}CqBO,i"qMbTES+ 룕N`m8g@HZjdz?zqץq"huR}d}"K0{#'=nh۽k-@ L}>ȳHR)JjLVDeNE/b={>2_9PY(s PRj"ȝ "tJTFQ Rpc)Tek `^zN`9<>eHB|wPسa˜ C4jdqF䝁8H5̌ pU$iqy5fVB9|E_FJQlQ畗Q)"#pAY%Js!(\Xύ$!JʘT&H@xF1cJ*Sbl^fD]/`1㧼&Z'{J?߯4XiWQ[|;AL*]nԟ.y:Q)Ml":B*t^>>h8aܦ !Hǣ Njx L5,ZE/s<13KhXhcxgM̝Dʒ(&g F8bl6ff~m)̶˛mtVT;E r{M9f ]w]/gM+wǓV3KW_m.AG()%uݶ.H]M|Tqlvnx|۸ȼ;-C|[f…NyDۛyvoKܺM;:lRן7ݜ$|Jm[ˉ_NwhDnRwZJ)slX찙 ڵicHrh \Xj-$qon.d D=ujkUHJ(@I ^YҚtJHJ4 K+\!ȇX;Nť ;>EȤS=0SôDX lTȨDS)ew.@aHG9d|Is,2 7j>zP 5PD@ 8 5Wai<\w:bbz܉Y/[6gQsQԗ*5ZDtB >QK`ZD"т[PCzFU !9Gr}Š ]Ok8l :L$"&|iXL͞V) iƮ{'^ή22^՞rzz3ov8pV#%8 5\#H**AJvxtTb #s #(]Y,5OÐ=B 8nK%A1TB6`d1tRln< wBCŴP6v`lNq.g( N,pXC(6xDZIHeӂFȈ )`yTkB$^ȴGBQ9d5~N,cW {D{#xi)OW0S pƇHst ,E=CCsIRQ k2 ǨHVQ𙎥G,&fqnٌu[[dEl L@ c(2qְ//%PpyU|;ɷߔ@q@)PJ`gԊt؝\NOQD9 "pMN ޫhRN2A K!iU͛HL(Em18(8Nj"0 g9 ao1qR\x-rw;eYqJiV&~UvGoЧ٪{+ [XhpYtUL!0ws0tp ,Hsc%y#{g1RJ[\nJo~G@ E /`J <9LAގLS/i(UhѢ!DE7]p_kxG x8<,cUXQtUbq=#+|ކʬ`Fػ;_]#eM3;`"*|S?6g;3*eprʆ ٳiȍ_izٲJq!sa \LrB6S{5JymofMhG5pkJ,3Kk7t 9{Iؕ~Mm>}&;q\"J0MP]jr{Z_-L\x"0"bOݗ^?'k bm6GqSLo'[6KGByHˆˇtEfyGBS'ZŊ'_g =w29˖ONGMr٨*Q33uTLFa(T~C?uv؜x Iq$槓JQhU UNcBP Fs=4/@xlm9Z7}tMG[ _8DNڐ ph5.220|Jm %G)t>ғo6T^H_ ъ0KZEFnp]"5˙~e:DV2'a&hMwʙ(?mJSkoJ,Hg׆)ScrP{V MKVJ=J-8kq3Ѓy| XbI9#b6z&aK~@&.R"2A&WKѥNx[)z{ ;{4gzK -rǑa Zrs.K oɿVXE-s`|kZrٖZ6e.Q\v-s_L\⫷LսGvPA]ݏZ ϣG=ԕպs_$\B>TiH~u&$90Axbr_>{F?V ɭVs~G\V4^|{y |4̥5]Ur[t֨MWD`SӯPM AN"uE#QW\ۢ %puET+TW=Ӫ"uU-PBkTWJ6+"uUȕ[ fJ9R]IdJ+"X#l"r5A]jƫBWr2h&(}Sᨰ%]l)o.RYY<~}1YRas萣N2YUGoMڣ 5Ҽ[.*4]|n^4ˁh)ҍ<$yZU|BF/1 peK@T1H"ZD;)#+:m1U i L%nD|S[*˛{V>k\#b]-ݙ G>;}e9WB, 3gVǿ&XC߮0s-Qc|}y >)d~ӕN\?mJ!, [HQ^Q$( P{!z%HKyFv&7 `b`-ZϓɂKBdO%ͳt*ed޲D&̂w9byZ (}&g5K-3+W}IyՂ.lSYK5M.k6A.e ;O#첱JriuPFkDBTT^O[ƬZ$k/ѣS8ҟBީ-rUEnR J/<.vjZh8^5[ͺS{II%'T8}8yz%DctJɤ;M'Oz} I%-ArMKJse.:rdp,&'G2( /1ڎk%F`0"׶V^ i憎tOg< sBoz48Ixz{IdNmGGuԂ)sB#\c`&Kf,*:|Q'kCOڙ6DPKkRZ)T,OmĘHsɋ2hW5rNډ:q/ dy˓6 ZOc}[ 2M t.b z!֡^&.%ƱUt 9+^25j$+đAx`C .X8G )}AImbkү2G %ər5 %.E:kk3r1+D 3Ƅ,GhbFیKl1TFTFuy#Wv X"B2 3AL .2؈@ߴ4Pd_ /$ pi,yP]S}9t*EVi:U_ywSN} ϟʸ+vk^& ie96冖NWG4k5®4m?RW&s7v%,>dJ$E)PHD49;54>jh'|pKKR^hUd6wTw Jf갗Ye?j\$Þ,tP6+PgS:}*}?&7Yk$uAvߥZvExn"g+=)hq rA[ozB.m6Pvӫ *5v^a-~f/ٻ6$Ug7.d`.~Jdەȍ)%M2Zm:^g@bǵshCSj{ay7f!̡e^ݻ4z^4y Vvy~knX<.kݲ_.4-ƃEDzCFlikB|s_ORk{Q5wQ>9Bt3.q9|-9|GƷEH3x"Vis 5^RgԿD'} Oj8  x92 \" j5׊v͢! _Lp +e%PChCD3gkBݠ866k2Nz4j{_0{?yhC},ĥtRpQQŌ4¹Yj,iI"D"|lOK :H)"$#J( B@@(viِЩ`E*"ا %%%<8%|g%0l\̝C>1I|D9o7Wz[&wRw-QiIv@v`C o[gx wɤ2̂0h `|*A>a`鰐9f:uH2%2h2'U$&y, QANj t˰1n Qc;Iu OIt"yn {N"8Ē'ڨdPDU\j-r{G"0՘iPKfޫw [uUh]. m#QJQȮoظNdGRB<1ph}93EfABT)FxH NW[zZL6z:=>;{O v,w{D$q~IkO4܌.q~$/'3\`8!`zY].2JlEUJvFIQXţeuce(^ f)0 C_z)W9˄L@ƅIf:'jZ?33 ƌXO|AP-{s(U?)um nA=^h!Xt^h SvҲLY \Ir)17!  5q`UZ$'4CI(ښDA9?yBE٨?tg8Z~;ۛ F3Wn]au:7K.w j/aW˙aR땪!nzTߛ7 6RXy˝6O{hƍy;؇5|;2ȑ1V9,hD.E1!$$Kw9jH?8LooMMݒpZu|#ِ\dHI7n!ѥCo6?U?F#;oq8*7E|6OjTPe7-;o96 `cDP$8MHC׼3hwիZ&utSsc,PţN%ºqkg9A:߼R(tE4őD- [M`6125" KϨ ֹczqd^C謃/@>?O_U'dDyB\XNd*"mI51O!E!: n^Z.^EO~Ɗl^C{t3.K2<ƀrwG/R~hO:EKhirbЮH&_*ߓKUIo$\' (Q=Ch$@}"DvIKZ8Dg`IzR'Fz$#cLDŅ)C} ! xS r /`EFb<7w4ޮ4Oke ҅4vTu,==?VSEƞ K.i^k3&߇sT=&FgvAAdcgXHDp꒜ٕT/wf"Y1xs:5@2!%AY.*'c*;}J0 `q1F'au}Ʀ"el-;zӛ :y*FRތ3S+qz%yI k>:H˾ɋlY<}7rK"{3_pל'ek.De'g6*]T>UЅ(q7*F.D1#AyA e9GѪ6([J}I`w2HBֱ@+xM)0AY ;EQ9HysQ e cm{:u*ܽ:,BlKnC#RaM] ?6βlnRwW_G-K} a`C$9}B$yü M˽n= ? 
:+gU/EuZN[ID*$CR淓m;PB䌂*gRbr)hOST%>=/O} I=Bh(y# ^D/,F"@QS['an '# &Di.-]7h|1gy:?ZK֜N -TS|;L#k.P9yksݩ;Z-Kmc|))EI@(Kd68% 9scg^*xO~H; </"ecŜ:FVJ4Q!kf\ E R> }`Hb(jxT>s>`.0$(J1,Rfl)r3jglgp`T>&)ѯ$[X<;['i%hq4/ͩaMNKpPgw^eOejY-6(u;;}KJp:xO>Ҩ`.rFpDDV&u*h-RܤP =)8(XoW1^`A T%Z2vFv؝ҙ,le+ IDzdK<̓OgI3n9.$cMEW r3>ZIĎHIHNbk@I2JNe5r ).b l,U42'YBITH'F%^F) Ӂi]Iձێ~ blu&'}`7b6˙6 0+l,ia$i ZRk46 WSyY1 !f d r޵5ؿo$Lռ8Z2F:|6Ig⽨\Li^uK=Y_g6nR% A*_ ቛcm_uzl|KgwUrw&#;z&v$< &Sr~kz(%a-%秕>KοI$>;if%C#mYA55%ٕ7o8c}/g[wSp#UpF5_=\)i+g5FB1}݊$J)Lq"Gd\'3#@SoШڭV3n% 퓂{F;Ձ/̏Ix~sp9Gy&v{<*s/CD7JMJ۬?RF+i+C{:n`MF,ue7y#^,X8y%89.0G}Y߼ƽ yDcD7@OV>z+ަP}3"Wm$ca~9ԫ4~xZϯ>?-11}0B"qaӸ1mχ|<*KZ^o70 wtT}C͙N:mw8.aKŸG|Jd(dQY82[Kj`GSh/?jTuxӣ|3;.rT|-eɵ j8ˣقyE~@Zegǜ{ W"%ae E=j̣V>X{[< ZJiԩj?"ȼ-s@[ 0x0[ \kSo4=~kaZ층B i047N$pܫ? |0pkͅ}i协prեWܔXp5|W k̵{WJ#\@ yTbjP 'WJ#\@RHv@pƉ}iw|\ڴ{WJ:]LROzPp!t@pw$|dE̙;4:N~9}vHt+*K@u&/o5o?Լxǟywץ?Ӈ5Rv\W|uɵ%^_˯c~l?OwE/xBzw~g׏[ٍUxp\$\rn"[(&Nܣ95ʯOӲ#qss;<13yi p#PRρmXDAltZB$JQFcff곧۶ݫ1?d}_X{_#7yޓ/UN߷륹cW%6lJB.nU&$V4 {Z*1>W"lf$RZ \C.$BJ!W/[Ř3cBNk9w8i{ݧ}aHah>$%j qZՑw~~J˗:y[a*`RҕtϹ;7RBLµ[/eB`27c1an0F+`truҨ%gi=„;<Ù9qYYێ!phJ%8KGٖ8(W7p!_ʘ59D}| JXhDPncfֈ1G1'M^ GpǬ;ȼ7w\Cvߴu` %SN5vWsk8SB%̀śBs1lP<@CK6_s I:%r) .T;)0*@%x8>N([| X Y_&$ !?zVWL"\x>\A\pM0l+0 aȅ w((AT( y&#>p ֳxX}c (2châHY8zVc@).bci <72?RH 9VDp|r\3)% @"`)I1?],d0 .>pMg TpG O֐iA@a}33A: dB.f WB?Sqtd0{=<5!8 2q, %8H*t+ _?x …L`vN^m6KG``.cdhVB4Lpd$8D){RSoH0K.0]pwE@z3!z c+?nJŬ|sʏФ64<(\I6<s7%g 9}ඪh /m)~cy]^q{JW)=oIK6pZdOvZ WBW\jQ!Z1w"ݓI t:J OJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%E@vv=J $ Uj?x%.{'2N}=)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@߯jkRY\\=J l5J t(ʒ@kEI DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@:Y%g芔@F p(ezJ @)NQ &@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H t:J/K땽mg/gXjnq/ߩ\ݏjzDjM5.I 3]=%9:?xᒄIp.{=?ce,#ap>.]GWtjXt+NtС\q܉`vB3I ZWK!{xHa,rVR^M#`cik]-4h:MJ 49µB aJ*pEu^ȡYX :—`!\k+DkOWrg#t [GWXj jAD QzJOQJ z+|K#&)9xB] ]Ywgǘ Դvut,VDWعj *VMvt(%:AV3*+l}5tp-~t(%CWC/Θu:2]#ahvCV&*+AtС&V {cΤ0+jbZM]GYge89g,u .ٍLZ}lrʞ5ͳ\$͗+N(M 33{>`;+_;6,]$ <-L./᧍C6?Nk{^gjcoTqIl(Pɣ`*׋fUZ\NG(FOuʯ?j8jҾyߘ׏?#c@qh-kQƇ6xMhsJG3F}GnGYE[Uۨ|uM+2JD[cFY2Cd;>[S0E40&V&9'.y5AO_K WZrC_Z@C+o Vxau=tpC+@#:As] EWWZ Z>tBN] ])/WCWZ 'RS2xt=V5EWXz :^ ]ZDWHW;eyEtuEKWVCWV>D;CDWCWvBc@ˇ]!JAkWHWijZ֓ \ͪt%91gؒ#0H҉]J]=t!;$]nXvgZx5r~[)+i섨b4}4thiZp#B^;>։TJr_Wt/>: 5~t(%:ARB*++O<8 C+D90]}q*+I5U]!Z=-D ҕvD07µBQtuteJTDWpY ]!\ak+D "JCtutgM;j ^ U/d@ NY]`)|5tpDF +uϡWgL [vj u\: Pi%t:~fzX3i~+[a4yUm9lv1:opE7Cl k9 ezDFY aupꝬZqvUfi[uZЗ\Z;`*87.̸x:p;n۸ tqīg~*@}r5&iDS>7! p-Yjs{MK[\CXЬgW>5-όύ ?_.;7?sg:!*>=ߋJ~@ B{MPn_k?mII3$ N./Irz.Ouy)=d(Vi%|&LZm-[m\2yC ,M.yXN"mFV)^R66W,>=c^+ۨUTX§`vz{d8h8.5{Žm n/_\xfhfκoص˛z5AqY&HͧWo5aum I ȗۮ}i:I )/vwnGӥ{~׏y߅tpR6yn LV}mc+s.Nڢ!Cj=o]2,8E] s&?;gE˭ѫ$d#K(LGg.D_KY`mVHa<`ȅ;e0VA습IhS̵댉Ee~{YW}n t/%'B 0juhB:,g#'zuٴZr>]bO!xBI'a:GߚT|I6򘽲>n0 6B-ZZi56 xcUo'p}ݷ'f.;[0}lglzgN,MD]<1[]Ã41 ʗbmEk`CqLgzv>q{ɔr6 b56lWr*) ks*JAb"=XYCA80= %MwIdI_^lDo0({m~ٶgEF/f0]g)]-%G2xSy_|f?.5җfl5Y}8*l[nb< hb\Wqaco< KRW8[bCrwe1i??ƣ?5ݏF0Pkm=۔ ~(ѻ0@Vj'| ~v,XQgwɡ珮FOjo\緖\/rVTܳ5ܻ,cDWEM sn/.FWnݘx[ i{v_4~nU. 
!H8OL<8X sYRL:rŽ8o_I ĥ-}>^z4c hF+'0#EIgѧwCX|R7y9{3 [ܯ-M E+Ok#|q}V.WeIZ*D{;p9i*&1~T>~+i qKׁ6TN`=-mWl):N%}Y_Vz@'y76)4-W^&ilQQC*(W#UOQnj YOI*aaWWQMWcUB+,[U*W㬔 x-xz^W}ܹtpC엹N>qWiyĝcVAs&` ڞW{sJ↗Xz{y7= vgo ދ7{=3~Rv__ 4YA ūGW; ͚ts=is9 X5F\w=& 2"glb)G(٧ dr =' d@,PBE$^ X`JkK,zr`ͪX"SZ %f(RkUi%w1 \  7"(Q !@ٻƍcW-q4\gķʾIeJ5O^ JV>$(P]>0i {Ι)񧻣 -kyap i$Mћb'yb|F;jzm'i#^EjSi$cW* ܓ`L:vXHT Cf!SDc@DX,- Bh6tĝ[kxSr+T9i F=^)n7W\.W_0E]l uo qW">JeOweigbP^.o\^U)G2r(\Ʈ;[`б${?ձXv1VR& 1A}0I9ǩJĨ6xΨ&}|x%yGTffל6ڞ¼Ku0}2l?ٴI37p`znn5p\(wp)BSϘ8P=t:F s4‚Sݸ?qo+yyNz-ĔF֧'iAt=_~,U3|^1e1~TR:)g3-|8Hb8w0 8+]N Gi|>( 8ҠOq#I': NyFbҘ1t$r 8M(rneZGEdQ0ApJ))$k7ٽ6:_o < ᧴U4y:c׈o/wx~K7uC ~EG`y=k'q)~-G`%i}ӏJRrEt'9%t}׊dq960*HEuφP6{6|l8oSA{+T:#XRς;hщćENS6,C0!hY!e-zg՘IDd;7MV :fÝJxۢ MOoyޚ$XG9-ڃG4aIcƆ! wD$tL0{ z`8N_rz\DE("E1('c$}-o-H] =JF*z3Ξz3wKg?}0dM{Wr_+OT,\Of,v~DǓ g>Ds3]T~Ei <[FQ7UTQ$ٛ$dx!^ˆ<{FR{ 2#4N_Byn>3p]’/ TqewJ}q}l8(tgYZkp̜}nhYh$a9Ô\8f0Z$'4C 3²B'wԍ}h6*6Yhe o|8њ^n8)0UnnA 6v +/a-'f82K)MZ .Gк;[>y],33XDB-j̷U^δyCϮݸ #yJujŵ_m1cUΤ9 * m,pgL>9Dy(ZN'Ux:+~>K?_fmrlȁU0tC3 ْTzt3unw9`-e3*e%aY?";bY>3ߘjXa؋YK HZ/EgabP dБ - d4h8Rna#QY Oc*Vdrp-az<6v&NKⷰG+[qK }y5%o/K-)gK۝U eFs΢6T ("S,QGXV2qZN"ѭ!K$b*_E>fTxG 0:&,{) EwZI%hD0AP4@;5({>ݴ2$cpe/E(C`J#7 ^I .|dL /2맦皚Z#ra3FL!jʹLZP!8KV  w+#iDT\x:$ЁG)8`'9+aYd$Fa[ ἥc:92-ͤX-&ƅβYT.U,ñ3ô6v&tq)ذP7pNS3gُ߿ϮÓj^X:cDqQyF |"vrec#N"dJr2A'RP^Eûb$ u^]_U4c0PetHY\\*!hVibqƧT衏pMLiDcvvvv:GQSlb$mڜVɺ gPi+>^'a<[t6/cD ̮/ޭ €~68\^e߮,d臛dG`-:wF_3Z%7t inFfV{PZF>aiwuG6׃ٖӨj}M6+gr2SZPHjxR7E1Kfl<_ejRcyer+st~m7wywo`,8NDP+j_$SxuWMC{uTM]Wޢ]*vO--R(=~uPs~W{;ie9k#.#蓹?AͯgUURTХ)q׾4! baGABhM<#n/# L=N:|KGDBޕSEә 4#=bCmR{~?#c1 )P[%Q8 Etp pktsT+W@wx+==egy(s煉Xջw}P\1Tl &#ϦaG{Z<@rFϵ,wJÃ&mrXj[A863q}T Očd.pY1F: lp6HFZ{1[l#.{zG*7A{UX&тw7C_t e(A4~C6Nlɥޟ՛405$5݋[r'(kwp.ֿ cKpP+PlӆiOR C9m~;7 3 6;R9  wAsC@> =/w}H,Jr^0A)74HC"<"1 2K@*`#{#ÜHuq^6gZ~|? ZCƜN -TS|;L#k.PFy+twN)ryi_18sPـo{~BKP9npt?fmo7_/"Y܇l ,nv1؆$_6wUORK-51liFzzH>]dU4^O]`bn%3azBi ,(a[#>A;Jj.R)7їfeK8x2RKij?봶1P!EF_kfHQ"j- 5HᔂFp`MC+:`ɸp)^VTcnKy-d41}RFUcL+cں Hǂr@@|sIT FZVj4Yl,!5mahEyၺ#,ұI,]I4H:=Rzj9"ts'J\ܹSàxD4 ,D-ؠ5#Ji))QY16)+$Sb-Ԛd.N;Rɐ4yi3.sUsb=.4 Po.|j\7u.Kd|˲7ˎw8<_)~W)k)3%yz8HshsGJAe0T=h~E$3Ҳ,4A&{#cF,ބ+ `\˵iac/s7`ًXl"ْR㿣Daق@oUA!xH1ȳ6cZ*ZYcNd5 #C|`'P/JOc ^Q\ַ{ܶqݏNk|?^-w/gEZ^#ݷo^\|ʟoQ/{o6Z=9oy<e>׸g|Gk^~;v=Sd[vfv~Á;^^/|&@9A?WaqDާFK^ޘv!|,_]}8WwlNw?\~ioA^}/{^놺go/{ʿ`.7Lؾkc7Ovw}v*Ǩ^IѫP__Û^j?QYiT-__PZ=P턙`JobNLF%RykO@,GQFHmU<_ͱ9lLu EVj%RVg8"tbo=(]O|[e@':@\9xP[5}8oR4a:JP0o +jd))D7[2Ak#ø+Ŗh\1 T}wOD%M&rJ(2[-ZY)>'Uez}Q!K6[Q&<Q UAkNDU1mu4eN#/$QWWrOD[7Y~?=XyuoBˀ%Va@Sи0Et uQ`? rˀGBq sdSeZ R365rK76"~f[%* '7;iC9ljSLIl)*hSo5` eDs1Rz]Ft-x 1ɘMԴn`#B(Kjt:ppK>+>A!곢V2,$m㙵>8,m<`|ok b9ʩ)(#!Ŝ-M rr2T RSa)$=;E/ B>d]s3J s<]шg_IjJ}͐'k_^,Q |)D;JqJ-fKUXjDKV:Ծk.6TBmxwf}X+|,/$ }/߂UcU<vV~=_nwl֛vGG]H9"òG_?-o#&CFT?| [H+$B廸 9&$(*B'i*OUK 43pD98*f$k1Chb6hQY'(7Ao ҵxù;$B=D\>{9wwuLuΟOå8JQЧo[Ra 9o.vBKBh;o~ͭ @0uEWBػMWG+j ]10FWk:{וPtuޑ銁at%F+ sPWDH( +uv]1-=u,x]1eЛQW6f ]1H](ZRP<t5=ޞNv5he]z'hѭy+;CWvC^k@a`FNնݷ76. 
P:IߌA+xbj)~K?~^.-p .-|[xTE$kyGp5uoD 8w7ڌ"PY3fJpY#pszI^jokf./-( k@ޫ( B ͙j egpoNr͉Nc *#htNQ*#@q*F"|ϔg<RQ뫞>$1gY1.>7҈Q+x\q_z}wXxh>sI_^-Ѷb Z^T뭓ONf?9ds's6+kєs`TT39ʉiK+' qe Fޯ 5^/zg,u}RAiW:ыܙ#t<̟C6fy8C١ODǃlў&=^xkg[,=yzM O 𬗊xЙ ߣYs(8g[Khдg;[z[]TSH:C``Z*I*ȑ2NQdT /}8E%$ bE|~_Ռ0i1N5kBW (\yCҎKDY謰~XHcr)Y 5BɽKc6r26NFa3h,#:;J$ pxt̆nXKj]nU@?~R xX+Qjy1 ƌa2FI RwNTpQ֔n=ɸl (کmh@MjKf=%峔d_b}_Wx[1\ $G81K",yOa4V25gvy2UG0b.B5+bHE (h9ORdAel>`Vbw[F.͡s_w%bC*&e )=Mү14j2\xa%vp";Eŕe,xK2p]U2PRL= 'TxʜDĹKہITNʈt ;0Yl Dp2 0B솷%m]ɦ#%n)ZcG[fi7"mwzvbGjN c::ш9w+$$$\P#GJ*D# 8jA!QXqt`z=,Re޵6r,B%1`J},&8q|6YUbV"O5PapFa-Q;|U]UU];P7dq|Nx)m8R [(7 [䜏 9&Q'}pq zO]U"2KU2j H"aXYm:R9I*$wKh>w| ."̭Hxa1Q4ZFQǽ79C;n(SHKl|շlp6٫ gL-SQ" N@,hj@"Q#<ݲ $590l=%abN&4(a}&TRntٹ&H],04T[I`m%,L4 L^3wӊhxE`#o)g9!sH}bYۯx[APMbz=g jJ*; NHS*kww~hPxTy:݅R):^\m~߼hCa7pȌbeoک2F|5=X}=ـY|:wtȜ˰(.:cpۙf8e>{/y=?qR37s}-s('rG^m:96leiVIͺк{WdK|BSjN:ԤyVC>Fd]p6I" !<\,Oy0F9+fq:%!!zhދ x8agCH(4NI9W0%mD;6֢0jyܡX+Vc<]u} G@~>e6<}b,wuxΚ< mH%K 瞰# Pd"nW n#VZgّ"%8& U&/(Hu=g;]4G3HcWIyo89iд(QRJ%ٝF~Uʘ(۹35}PؽTCs=!D Z3[M0[> xT-am$S;0LlܜR2 d beR՚tI1su!ױȟ'flU֙gޟ%kVYZ}mf7`hLrV,.6 ҩ(4_]R4ؘ{*i)>!步2M}WNh8qqιCiYKP#6:ܓHח,@6\<Ǡ;!pY& NA=4;bW5iy4&jT+=g~DKEZΜ"NN,@7\@ʽ*&'>>a N'@_N۸}_oGt% )dq ?4%h֫NW댒4̅;g:-Ͷ3ΙƔ銆܆rpd}Y޶RS[/O4>Y?_&o43\&0̬YЈRrp4S420e^Yu Dk0'fY(vX *NK [NPE 0Y_V+}Ob@iÍK9fiH dN OqVTu&~F=_J iDV`C썻]@Sߣi%~\{c,kK#,qR8)Gɢh䔷J񟦻^^L #hNҰT94XF#%jU`6sl@ׁ'|hi$k[rq_SgO51Վ#|ZQV\٫ў1F `sͮ 0C7gsz:$he3/ .fg~?-wfy"fsংKxʬQSm YiԸaFu̩Ճ{-'7~11~sl/m`zW[_O`;216ngri5 #i/?.[`ldz8l6FY i!SN_f_FaBM^Y$&UD{Gv5sxO`tJH80> O qboGp( Ǡܽe DNz!'88^ 2eKRn4 ) }m#b6!4BpȴDoTfp`sx w,HZTIJ=eLK j/PI1?oA$<:iu`-VoG8Dr=:Gp:*$.H}U6;•가i ,NM@~w׾Lc*=OW1[P `A ޥm(VN/)8\n >ߟs{^eá8O΢[<0m*%MMFs"O$.V?'ip[>Mgfqq=/otr{cᆒ{=[{0f iI\I=Ɓ!. tb$ y.%ŠRml$mf.T kvi2JoӶ۴%6i"S"j= ߔ[Dw9eryZ. kcCxMyLbgC/!hE X "qw#zF"Jt,)s(ib9њ '0b\FKR~N!Ha ePz5kppլypy,C;*<2J(H2RM0`tD[jE5"Y);ΥS5 (^)/qZ! [ ..6TwQ\"̜~0>O]ع4zPl{ZΕdם1j5MxjaSM$lg7SEأQO(XŅ"G@* "r!IeaŬ{m#>Y)A14@d_B@g&i9;]*@B8kE_7~YNSInXAx ,( T\8sed:W-qVGDjםk\XY4#DrEK0& EG]N4{[n: !p416"TbבEmRHKۙ2e~X^=H;#[V bٹ@hB ?a`P3"-Hv 3m1"!Ҝ~Ÿ%nuD"/ rR[0 BJTK;ƍ!:jl"p'IC Wxk|A-:*9 ȨgH՘f>2 gF#  @5Dc*i%m:p,5q Ln^#\o̓L#Ɓ398)4M! RQOXCgRRz"G%O)mg^C5&)rOZ!ϝ>ȁ.U j t>j+8&z{VˠAWo-]^&ca80dOk4> eBMٺ,&$,3[}bR)!ۋv}hꧮG 0#\\x\.^žeߣko/ŭYz,>j+#2es0胘=-Fx/j5sxO`tMHZӋ{!FnZR*֒]sXZ 'V"h9ۈ(_&[dcC0=h  ds{r.?v>nk:~YN.'0b I%HGF]ٿne԰s=2)| W@U\6UD,qI0Н+{ͦYS!t࠯fT3Ƙ1V,Ígˮ 1gOip~I40FYgC KciS]q-dJI[ARPD 13$rm$j8h|ht}W_!Uz`A+Q>O\<~{wؽ=ɹէZYs6zobwwo>dEBk摞׌kXU|'b~65 ÷i?f}E#a%= X~$̲'G3*K!jڦBhk[n-f9>-4v8EkOգӹ|d!:Ƃ> z–ӒCl8$0YjLm ZMi βh!`Q$MPQXTipQyoN=r5HEdSQ($ 6 ֨ FI&(nm0 Cejzies(e㽑{<<}l T$(*//ZG'ٹٹyJeRvY<[Iz돿7T;EE1E$ 1 =M~7z<5K gB5|.][EVTC][3lݣ lۅ!T魞XRb;?{TY5 wֵK}3ɺ5[4< ; !vɺUSm]+ِY' J&Tw,lkŝ˗x=1*J{3p3.<+ngAOC,LVf k*+*L~{w%;'M6^)BmYݶ}mvd&ȭIf]PȖ$sufppQWy 4UM]'爠iL2+ tBn$N UZk/${]n[zv%w{jkILMuRJzl%Z<ךpc ]LuFXWlxeܛQa_8:zvT~&fㅇ3&ϧ'Oв5O2Ïzyŏ!| p ڟϒxAJ(FKÄPLԊ0"^<Պ뀘0Z)^g(0Qјm~oAhvx?Oon<<a,?73g#I@~RA lV1+Ger+؃bh% rg*5m}D%/Y9FZ i Z+hajX^T+!05҂ . Fճa()(K^1ɀu EPÂfFRZ`aTvAX;:.t~nla#?I-yq|4Th 7b+ h 9,04x060g B6=T?N ̃YLc iV|f&+2˴ H49{BCwS gkMk!;ς窬ݩ+*Q(vZqnT;#& <' SÂFi0:5J#`Nm~s/~ߛo'Οp.ӳ$ؙV0/2^3n`ȼtmAt X\X:KX. ׮wD{j,4ױjaK Q֟j%#T3)K*[GQ! 
rR#@R ^1Dj위;Iu ax7=(\E  9(:cZ\T`B LԊ] %#dG ; ha9!ub#p%c3A{NƎB5(9}6*ՠAx)bS$f^{l$޵ P?'7.0D'/ߢ+n|{UFY+!ke8|aSc,(>#pA+#OK-I)Xlq2, V6b9p` yȞ YRf/Bꑐ.Y2%[Mn$N<70B=}j.Y2Ei7L:햋AQE'tܴv?NnuH b C=5\nB햋AQE$xZ'lZeO4U!!/\Dd?_n Qb":cTnRZnMnuH ,Ux[Ar1H1h݀FRnMnuH ,o״Ev EtrǨ)ynMnuH ,oֵہxNU""O%oMeO4U!!/\DdJ[nCnNU"/#OK}lnMnuH [FCE~vuF:.8\ +tl9@5("j] cCeV]Q}TJceHBH.RΕ ;n+E5tQ AVP1nZWxVZnK_m"ffM1.fmJ2%N08h>E|M(Ns{On>d^o}zr}ڡ=]ST{v9vuljrťK;A@}ZQSSN 'FAJa>+vZjJ} ٠ CLa- FQVkS8>5a |)IτP4(^XUT5@j+s: bn O(`!nv^JK$TB(( &؀_͍{ŃaV̅,B[8XȚ$Nh)ZS`A kbL BHXa a`:(w wsD8˜[xv B>!ɧ ~SmU{삃$y@muU<"Џ!J@.bR°)T8nxW`)1P=WX3g =is 7߳~ NK欟pᄊ#%c$\.XMkK=Ot': 깤Es㕙q61_~~*Ql|W.s7w9sҰO/x ſ6wvfo1`d6{~/ 'Re.W<ߙ+LgwbR3^hG7жU(_'NAܧ7޹q+D& jE4` D T_e< _P_sn<}gM5>Rt?!׷h͏^OS MmzM{luRI%ú_v*X*奏XP@1hrUj8CHU@F_l`ؽIҕrEA$7P8(#cd#[:޵cǽ\w®D~0:s?@N䪾8:'F|/" 3d,nu" 43o؏L,߃a6e x!rp:`&`.)ز5r8M ZxiPT!08ePQF@3RpƵF^;4:VZCh=((_|g팺&$h"4q"!MKŴ'pw' =|u߿wmb:PF@mkofpr"DߵMviozY0RA0GΆ~Z326f7aN' 9dkk{@ZA7o&/ݽkU@OcnUD6qZOC^^Gϟq ?-<-Ej[ut\LsG^28h3~k hEbbt[rg*̸O{ǣ^-7q0й%3ނ}wasvu1@ƯYWOx}8j"~cs7\.F{h3'~+Ws[`㵿^'{EW^}N잦% {b3;fWY,h,} X"!b&);=0IR! &4}'tႌmSl\_Ox{5㘟O.l1}ꃨnjZJp)NdEa20[DZ<4jA^|z3N  0+,a<@k"ow/ ̈́12\,_jH JE,yV 9^O*8m;6>(lHGj^*ŭɊ/d!:U2UuPr| +oF+Kֺhk*-C>?g8K@Z,R4zP J^PpZ`9}hNZPd ÌQk-ET{.8FG5FF˩И5VFx-fSV_|#o[8 ǴՕV .ϟ굾qSxG^t8]6DɫwK;/Med"iSY *JGZK۠Raj1h-P66ϗtj-{+Tvj" ,zmqs !mG 0osj& 28 Oqhz9NY*.5M.f@f3f^[pOI g)NɃֶfnZ|"מ8O;yYR kcێg\ʆaoE*'*06˳+!w!hnZxf9Y-؂4Nq;fgkK(&J6Vi+TV_T֪_`hYk[{P.^954*Fŋ ExѼ ,Z$FU)'*b7Oo»*b *{vT=Xoxn_hʆA)p^5I׺6 { h[cd#nzE|MN horZQ*GQZĕ&_b,4'M&C֔{nSF@L( {gu2S'-ҊI m4l5b3w࢈~|vŰ&KU65}KQzb<˭5»cJvFrHuGR~mn(j#U1&ľflInUpmfj+b|٠T#疖TkZ;23Oy)n%i-JX/o-w5Phx8x[x[ IIWpBzjDvPy1Ĥ]L:ahT: I$}'p|A I/Q(!VZ~X跭ˬ$EhO9 -鿴C\򂛉N6eW0Mf; ~ц+e'e'e'e藕a794($X2*558qRGMl̨8n5>Z/R׼!uu!UsR|=;; 5_v㏗yXS$ՕP*VJ\]VߊX4RdVb %H j<0-(J$ykoq@\ @:,Pv'$oKgw#RYp9 Ŏ X(5Ϋc\||.bg^:Npy C?SC)̃L{!PḲ6b E[KGP%G[Bzhկz/MUHr.&>ya;zZE ԤΣJ[bQc|1l~@yѩI__sKSk=̝pjo\9v|DvĶ I `;~5k ҍ/F./Вw`Ҏ}vrp>|Hl\ZsG}nat.8?~>.vsz_-ݸLW}U|}R_| 8u|/?ORo5 FශPq/Mҭlw<ﯛz93,M!V~/&E_}V j'SPǿ= yIih|oCkoQn\vq}z>Ť,7+K&|ur+=VMǭ;?UkM3`xv3#  Ł# b~4b^oḕ &GK˼9EZ~7y!\EysDMu!yӔ9jvRw=Xi>hєR,ѼI9$c{c+=)w - gjrUW-TGc%oq l} $*´cadJT/ <냚Z^$-Jah[WSJ*{Ưb';+"i9Z6ε mCj EQ6 dcviGJoVܬ@{\ `|C+A{l\Lb/C*ZE,ٞ -E⌑d;.<:avQ=םK(2eS[H*6VEP1N()i*uNH/0x--^ŠJ^VkpaJ0oz0&x1 E :0%8Dh#F:G"†kmZf6h`4ЮU;6 av./BYpVd}l,nfLyE'(M6uuq&Ofr_uk2[4gmg:n:aPwC@ʽ7AD"B/LyV}KE<*/'n>Ȳ>ͅriY;ZP QkȄDt~6y?!;%0ZfA'59pf3~kVAI4k VY[Rzoȩ+ri)S"x )\[~2[l^q*Sd5hICfE>p[c3&vBl跄i&nFV)Ai _\$utxۘD[[mm Iyޫr\Y $vۦO<#w@S<5(=2 mKlKMP oW yY *>-|$(.c9Fma )&2d}յT5hVBZsrp%p7hJVa7OdeR*AQzAzmɅo3 C)@mP}O7pWqI˪[ѕ6;5n*`memT5Wz06 -*mP+O*Rڴs {XTx,-{V6phFrTM=x[O#aBe{>DZ^ۙ!=8ORN98sB9XXpmv 5:M-WOy\ 6}fR0D\d. )0bmDĸt8-ej;gucOiKLIglkt[ o=iƕTR湙TC&pRq&*) MBI`.6 : 1Yî=ޮV<#BI韉t)iק^nk?nkZ>xjC9#r'}Dh<R3GGQ* 'ZcŢuug/R.~t!Fدioio_ޛ. ;,CO4葧!"(< )D.":I k{݆j {-\2^,U" pX)JMJiuJipY!wU1-&K)@%H{KFQQbpC@\S6FJ"DUUQ麶)"H5~Um- sK(Q% qQ]8j' `+|tj"09*b-^R *f۟|c6ic6卩K$*ȀE9 H]KJ%lH@D]º r 6xNLh,!)Wh8)8)_,v$ 0 NLuqBhj34"A J]eѮP-Ѯt{2*IB*`]Sqk\HBXc1a#6/Th JK/{6#?E9<׽[<\IQ٢J#J+EV%"gϐ9u:{a4;>|9-_M>U ,> _f 磳sr#\gJ 2ߓ (9õף,Ǘóp6]b w ?ߏ&vvrtlGgq7>[}˟s~d|y<DDp.5۷s[}*|!ltxwdWޜۘwUG~|#-`vt}s8]ȯGF/:P:~pOÇսWbƥt<5Ůy-Hiq DoS/oil?ਏX^Fpz҂Ϟ&^̋]LA:G !T?t8.ɊNҏrgyOͽTϦHE3(hN?}A:ђOϟp}9y|Ynćׯo ʏ%~*qV`ʏv};1uR ~5co\f>cdI啯.r\>%v*@/tHˈ1LR 4S!(20)Wޙ2`~ZK OEB g N44.hMOג{} Nbo7)ՄG\q#1oT1G0R9ߏ#[9E{-hF ;Ql țG$"o ɛW!ohyQ[)j:r > \VbrdקGWx_BK-72 q< X:ęeD$~]e$Y`+Kq'o]R]??XjC|\-m^Z? 
_?ݫKu}VQq5C󬺮}.V\m5{S{]kQpG]qAm]g}*::4?hmR7ڱm_*6o.PMI}7Ye Nq` tjd{&e6b֐mw&T6< HMʔ\lS icq2Ҩ] 4\4.h FhL!!*@PI#j\i.(*dCʤBs!W`v7Ǖj"9J 9EԖB)Pъ@uJ኶m2(ݪj{B!y0ޡH(4ڪfPV_?b,U]k]1&&o2*YŤkp5Ti\ZvM88 ZѶM) Qh\)"od !zd}dќ7K CQGfEFF|.=RP6sRM%sMۊ6 HE ɤT Ɍ(?nL,egII"4ȜqFS#w1'R] J݌\. _k KӦSf$Z `_Yi{_ hvCDJG6'6+*h[٦Pqus=@+Ů.xt<, G;:γ(6RaSE8qQh'Wr{`YH+ąG@&ǵbyҸ]IpHXVjR=m5QU@!W+\6pP(+\. }˾e_ᲯpF. ܾe_SC4:f;'n57, MG^#vs~v>>[kO͝b=" /]XQ8$l&Y 7'xE$"5ie^-Wm꼤aٰO0ml[1̩:uTl!ǝ%aK$S6уsc#"zV Lp46{FQd-=P!˿fo,DfDmwE0'DtKdiS脣vXV3! 9/"릋lFd!tݥ/B>J:dInV`k#I!{~-D!Us.kL,՛Ӿsȴ+gd'OqoVyeucޤ,7: @h2 3e^ں˱\A"'x-YXnִ$<[w7rMhsTP;x- 5:Of X?%}k(2$d&/J;#[,%Lv5§..i8iE j%n$zW-J^?6 E5Ptw3@qb]ƬdRwkxJ)VxU8,SD1-w4ٶt=Ŋ!K z@y0+L,ѻ.]"B.K߻.K㙆.['Kߑ_ ߿Mó1􃱥T=*T]NqǃU@}@wEf}0eDn|.+1<(֋E*s!q[yscD@{/ͺj@n4BH &iVO|3SW}pTDeU%5 }kȑ :i]$lXmbk[i82IG@DI9X&YB3\'8;T`cpu/߸i@.bT?QJ-]$Dt٩0yo+V?;jq'S?+@/%&Y?jpSR?dmqMͽŷnVJG3rukj@uiSrvpMhb>=$xdzuUpz$ r#3$:)yn-Wt.=\6B!VBu#y ьTpkn%b D܉u%hAR5)Xbc5e6(;aPQ4!8Be` 9gR:RsJv'_!1%C8(nycX'$pP|.F\3VD\?.o -O Vgz+O1hy,Ypn / Yk1l-h0%1BsKrL& &"I\N銭7أWt`[ΌS\)ըVف7ܗ괺@-[ v_Q׌*l.&pDm9E҃q"+5Хl3A1OrYh-i6CXHD(EZJl70BeD)FmOKkW?a*-cW[Z`wF%V({eHa!Zh!ޕy!̂&:>@u^}+aD _?l.C=MtV=o/xwՋo>8Ѫ?۳d zhcKkf)'b7*JHO&1qf3V3 q#V1DVmE ŊVG>R 80)g%]R197|ov`(ΐb%aMPp7˷;Omr!0EgHK: \ #&Pܻ 5w=A-_| w2#^ߜ޿ךJ)&_鎈8_~\kVٿׂMg?oнܻgSFA[n}Z.nm]Gj]޵g1q ꛯWA~D}rtϗ;Y2#~:χģfJ ikr 9 #+ wVrueHiCU:nISay?~wh`O#I.K;.TCIEr7V$v2 _ z$s/v. N{1Ve!FZ?F!VW+V r+ِFz8#vA NyӵW)|#W>}7/MzXjv%KIZ-eXͨRk(ɒUldhI@_3OI>) {ARL"DrxMiLhyh7J![0UٽT;ӝ! E:Y|X_\xuBZjOF(go+ '; ;Q᷷ijijijijϺ*953;A+;WnP5(`%R L+#V'wċSh"o4oap|om}t%J1)+kTqeY8XIjLnQ-!KvʃE$aԝb4/PgV:ҹcb e1t]^TQ298i0b\f؂'EHġHxI-E_ F"Cë_lZD%sRRW 6DS5% BpP} J U ljT,Mt¸Uc ?KM.x3ƭhLɶ]u>Vm9;P,f-\ Z9Vh&Nqm_YhC6jTw$BDRPbFͺQ1cgzO~bL%U~nQz8Efg9:]"iB{k},(Չ39Uf45OW09YJ_nέ$,e7G)csRo9Bk.SXp:;XÓr̒R$rܟs9\(cDqt( ),/[ -'PGK ; 1N `'s]agÖP=s66Zv:Ogl»ftBg ke<bYYYQR]fe@C^锆[-vӬVAꔾ#lSnm y*ZS@gqn$VAꔾ#@j=4݆АWTմL|}=%et #^q=βĻ{,WvKޘŽ }6Eyhga b`$^at"N<+-驊ͤލd$_؄ N~ '9w @]J{H . +H Jfޡ".c &^>@6BRt5=3Rғ,3GZF_c?WOO.n #&f6 ZHq-,>) Aqu},@ fwFA'uXF.s;w3gBK2M< 5c/áSdb(~Ţ/1l[6# 0eNc~R;׊2R1P=kڿך?5c:. %,9+(S>]\^2r-)Ifo؍nN;r ].݆АWE:%qn`r[!IYWحYeTFJtx D սq(pdD{s\4#42mVr)}S;Qk'R65T0&`A#{/6s4uw=~Z[4gg (*DfTw&,]%!{Ea ,d֖/aWV\j\,C  6+n\0&PB[F4AHE^mr}b& ɸ묷Xv}[Kdpmpa&V{y W]\ې5 ~Zv.HU b%7T5hj˼f vnh0|A Yk)uSyKEnNV _ $J*@JR.Ϊ+g _~-9.b}MߤmpםL-g"T\ʇPSW17Dzl#M80h'P72X݃5~uw` FqJ'q N ОY,UNƏw4V:tE 05ŏ;IDr;_h$F)ZK 8vT度vK7z6'$όjMK||޷pч87{,C/\∫ĵDOsKBE' %RAkFԣYgz֟^_vԵMmDu[zR5Pq\Pge܃ImC$4Dհd:mO^ʂ [\ S@5,COᭋq~xDuZ5K3^S+f;8tU,15oS˱F_\5B DS],?%Uz\V. }SITMQGgY\Nj7;`ZKX4Lw0, aX8^U $1B7 o@{4N}?/_/NHW*{py$~:iZ6uRb*?6gfggֻ3m0A LSV*$$Ui %E . H,9UqyIaFB"r=mmhcL1u Y1 mB5Z]ш6J]U8*,6i&^PXLNpIS8`TG*I(!K :Af&2R蔄p["5ZZzXivJ.wu a m%!m (gF%DbM#r|,8UA6MbFB' x$\^LZίWՊ=!B[f-QQ.nR.H*&&A8ş? 
14860ms (00:08:53.206)
Jan 30 00:08:53 crc kubenswrapper[4885]: Trace[371657534]: [14.860740827s] [14.860740827s] END
Jan 30 00:08:53 crc kubenswrapper[4885]: I0130 00:08:53.206849 4885 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Jan 30 00:08:53 crc kubenswrapper[4885]: I0130 00:08:53.207388 4885 trace.go:236] Trace[65612605]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Jan-2026 00:08:38.529) (total time: 14677ms):
Jan 30 00:08:53 crc kubenswrapper[4885]: Trace[65612605]: ---"Objects listed" error: 14677ms (00:08:53.207)
Jan 30 00:08:53 crc kubenswrapper[4885]: Trace[65612605]: [14.677639429s] [14.677639429s] END
Jan 30 00:08:53 crc kubenswrapper[4885]: I0130 00:08:53.208212 4885 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Jan 30 00:08:53 crc kubenswrapper[4885]: I0130 00:08:53.208755 4885 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Jan 30 00:08:53 crc kubenswrapper[4885]: E0130 00:08:53.211276 4885 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Jan 30 00:08:53 crc kubenswrapper[4885]: E0130 00:08:53.214503 4885 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Jan 30 00:08:53 crc kubenswrapper[4885]: I0130 00:08:53.229671 4885 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Jan 30 00:08:53 crc kubenswrapper[4885]: I0130 00:08:53.567267 4885 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:58964->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Jan 30 00:08:53 crc kubenswrapper[4885]: I0130 00:08:53.567352 4885 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:58964->192.168.126.11:17697: read: connection reset by peer"
Jan 30 00:08:53 crc kubenswrapper[4885]: I0130 00:08:53.567277 4885 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:51222->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Jan 30 00:08:53 crc kubenswrapper[4885]: I0130 00:08:53.567480 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:51222->192.168.126.11:17697: read: connection reset by peer"
Jan 30 00:08:53 crc kubenswrapper[4885]: I0130 00:08:53.568025 4885 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Jan 30 00:08:53 crc kubenswrapper[4885]: I0130 00:08:53.568153 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Jan 30 00:08:53 crc kubenswrapper[4885]: I0130 00:08:53.814234 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 30 00:08:53 crc kubenswrapper[4885]: I0130 00:08:53.821840 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.052329 4885 apiserver.go:52] "Watching apiserver"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.061059 4885 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.062563 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"]
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.063552 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.063718 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.063921 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.064154 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.064232 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.064915 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.064924 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.065186 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.065489 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.071342 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.072531 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.073115 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.073584 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.074072 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.074510 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.075558 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.075644 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.075752 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.078654 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 13:52:04.311495728 +0000 UTC
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.113703 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.113747 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.113845 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.113900 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.113942 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.113982 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.117429 4885 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.130636 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.131439 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.132004 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.132030 4885 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.132128 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-30 00:08:54.632092347 +0000 UTC m=+21.223564105 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.131957 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.134460 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.134691 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.134992 4885 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.135301 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 00:08:54.63526635 +0000 UTC m=+21.226738148 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.134853 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.155549 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.164910 4885 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.174993 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.194798 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.213341 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.214472 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.214565 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.214639 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.214840 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.214937 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.215033 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.214844 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.215172 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.215253 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.215343 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.215534 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.216693 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.215171 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.216787 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.215339 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.215607 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.215876 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.217205 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.217542 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.217999 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.219447 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.219518 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.219604 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.219640 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.219670 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.219699 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.219730 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.219783 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.219814 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.219840 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.219900 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.219928 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.219981 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220000 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220018 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220057 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220088 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220119 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220152 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220181 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for
volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220222 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220227 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220330 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220364 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220395 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220426 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220435 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220452 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220477 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220472 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220476 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220576 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220610 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220638 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220615 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220668 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220938 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220999 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221037 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221087 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221116 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221142 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221177 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221201 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221220 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod 
\"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221242 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221268 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221288 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221318 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221343 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221365 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221392 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221431 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221487 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221522 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221551 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221578 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221603 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221629 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221661 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221689 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221711 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221736 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220906 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.220972 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221126 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221276 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221422 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221593 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.221796 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:08:54.721743196 +0000 UTC m=+21.313214944 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.223607 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.223653 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.223796 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.223926 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221888 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.222068 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.222085 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.222176 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.222276 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.222334 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.222800 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.222838 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.223187 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.223288 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.223403 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.224032 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.223667 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.224724 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.224956 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.224972 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.225023 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.225493 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.225859 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.225873 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.225906 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.226011 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.223682 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.226186 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.226238 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.226277 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.226335 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.226376 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.226413 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.226016 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.227361 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.227699 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.227750 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.227807 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.227836 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.227868 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.227901 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.227933 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.227977 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228009 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228041 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod 
\"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228068 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228098 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228129 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228159 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228190 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228223 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228251 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228283 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228317 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228349 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod 
\"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228378 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228409 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228441 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228473 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228503 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228536 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228568 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228598 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228629 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228668 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228694 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228726 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228785 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228816 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228846 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228877 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228905 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228938 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228970 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.228999 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229030 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229062 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229095 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229124 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229156 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229188 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229217 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229248 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229279 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229309 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229342 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229376 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229409 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229436 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229468 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229499 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229526 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229557 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229949 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229988 4885 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230017 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230052 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230085 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230113 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230142 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230171 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230219 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230248 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230279 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230310 4885 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230338 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230369 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230403 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230432 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230465 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230500 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230535 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230565 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230598 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" 
(UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230631 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230662 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230695 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230726 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230755 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230803 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230834 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230865 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230892 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230924 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230953 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230982 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231012 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231042 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231074 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231104 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231136 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231165 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231189 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231220 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" 
(UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231251 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231290 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231323 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231347 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231376 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231408 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231439 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231467 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231500 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231532 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231558 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231587 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231616 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231644 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231670 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231699 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231728 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231754 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231799 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231828 4885 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231913 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231969 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232014 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232093 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232129 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232202 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232235 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232267 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " 
pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232300 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232331 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232430 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232536 4885 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232579 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232600 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232616 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232637 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232650 4885 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232664 4885 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232678 4885 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232697 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232714 4885 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc 
kubenswrapper[4885]: I0130 00:08:54.232732 4885 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232750 4885 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232781 4885 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232794 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232809 4885 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232824 4885 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232835 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232846 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232858 4885 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232873 4885 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232887 4885 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232899 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240239 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jan 
30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240305 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240342 4885 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240365 4885 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240378 4885 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240426 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240440 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240467 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240481 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240512 4885 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240524 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240543 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240554 4885 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240566 4885 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" 
DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240585 4885 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240598 4885 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240609 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240620 4885 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240637 4885 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240650 4885 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240667 4885 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240709 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229934 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229983 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.229987 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230644 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.230820 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231058 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231094 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.250170 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.231936 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232005 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232305 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). 
InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232350 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.232594 4885 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.252559 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.256240 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.250407 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.233806 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.233955 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.234467 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.234900 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.234989 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.235616 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.235811 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.221818 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.236363 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.237664 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.237738 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.238144 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.237940 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.238554 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.238672 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.239014 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.239270 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.239353 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.239646 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240069 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.240277 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.241245 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.241465 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.241855 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.241877 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.242321 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.242176 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.242351 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.242676 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.243435 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.243839 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.244032 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.244317 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.244346 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.245730 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.245819 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.245873 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.245932 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.246530 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.247128 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.247589 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.248647 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.249462 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.251083 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.262374 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.262377 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.262256 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.262524 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.262653 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.262749 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.263405 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.263606 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.263946 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.264803 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.250983 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.264903 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.264806 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.265856 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.265194 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.266618 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.268325 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.232908 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.270285 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 00:08:54.770123119 +0000 UTC m=+21.361594877 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.273842 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.274073 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.274198 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.274490 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.274855 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.275146 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.275516 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.275912 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.276114 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.276362 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.276426 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.276677 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.276703 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.276986 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.277222 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.277447 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.277849 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.278202 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.278281 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.278391 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.278404 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.278557 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.278973 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.278979 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.279143 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.279272 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.279308 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.279544 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.279842 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.279899 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.280233 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.280258 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.280361 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.280383 4885 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.249482 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.280463 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 00:08:54.780438305 +0000 UTC m=+21.371910053 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.280471 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.280681 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.280847 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.280956 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.281029 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.281066 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.281507 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.281517 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.281554 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.281991 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.282180 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.282483 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.282713 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.283158 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.283727 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.284469 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.284551 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.284758 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.284852 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.285407 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.285474 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.285645 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.286048 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.286121 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.286213 4885 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e" exitCode=255
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.286317 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.286430 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.286465 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.286521 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e"}
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.286720 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.287013 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.287241 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.287337 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.287575 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.287627 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.288568 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.288750 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.288822 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.288914 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.289073 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.289642 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.291851 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.306482 4885 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.310414 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.311647 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.312513 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.316385 4885 scope.go:117] "RemoveContainer" containerID="b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.323209 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.323860 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.325605 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.327736 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342059 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342216 4885 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342236 4885 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342248 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342259 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342271 4885 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342280 4885 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342272 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342291 4885 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: 
\"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342393 4885 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342418 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342442 4885 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342460 4885 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342479 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342496 4885 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342518 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342534 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342552 4885 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342573 4885 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342592 4885 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342611 4885 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342632 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342648 4885 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342665 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342683 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342717 4885 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342737 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342757 4885 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342798 4885 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342663 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342817 4885 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342968 4885 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.342989 4885 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343003 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343016 4885 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343027 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343039 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343051 4885 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343062 4885 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 
crc kubenswrapper[4885]: I0130 00:08:54.343076 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343090 4885 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343102 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343115 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343127 4885 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343155 4885 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343166 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343179 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343189 4885 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343199 4885 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343210 4885 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343222 4885 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343232 4885 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343242 4885 
reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343253 4885 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343265 4885 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343274 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343285 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343295 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343304 4885 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343313 4885 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343329 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343340 4885 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343353 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343364 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343378 4885 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343388 4885 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343398 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343408 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343418 4885 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343429 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343437 4885 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343448 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343458 4885 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343468 4885 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343478 4885 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343490 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343501 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343512 4885 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343521 4885 reconciler_common.go:293] "Volume detached 
for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343532 4885 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343544 4885 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343553 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343562 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343572 4885 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343583 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343593 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343603 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343612 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343623 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343633 4885 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343645 4885 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343655 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: 
\"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343665 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343676 4885 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343687 4885 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343697 4885 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343707 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343718 4885 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343728 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343739 4885 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343750 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343759 4885 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343824 4885 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343837 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343847 4885 reconciler_common.go:293] 
"Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343857 4885 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343867 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343876 4885 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343886 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343897 4885 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343906 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343915 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343925 4885 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343935 4885 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343944 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343954 4885 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.343987 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344016 4885 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344027 4885 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344038 4885 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344049 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344059 4885 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344068 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344078 4885 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344088 4885 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344100 4885 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344111 4885 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344122 4885 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344132 4885 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344141 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344150 4885 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344160 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344170 4885 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344180 4885 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344189 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344198 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344208 4885 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344218 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344227 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344237 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344248 4885 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344258 4885 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344267 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344277 4885 
reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344287 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344296 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344305 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344315 4885 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344326 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344401 4885 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344413 4885 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344423 4885 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.344846 4885 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.357937 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.374326 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.389245 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.396992 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.408107 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.408350 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.431612 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.442512 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.462140 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 00:08:54 crc kubenswrapper[4885]: W0130 00:08:54.463909 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-20eb79d8fe83a8a6f021854f73941f6f09b4b058d3bdf42f7b3928345d9309c2 WatchSource:0}: Error finding container 20eb79d8fe83a8a6f021854f73941f6f09b4b058d3bdf42f7b3928345d9309c2: Status 404 returned error can't find the container with id 20eb79d8fe83a8a6f021854f73941f6f09b4b058d3bdf42f7b3928345d9309c2 Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.481794 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.502105 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.524148 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30
T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.553261 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.565426 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.577065 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.650414 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.650478 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.650650 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.650669 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.650682 4885 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.650741 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 00:08:55.650722367 +0000 UTC m=+22.242194115 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.651218 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.651238 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.651246 4885 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.651303 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-30 00:08:55.651296362 +0000 UTC m=+22.242768110 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.750894 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.751162 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:08:55.751133604 +0000 UTC m=+22.342605372 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.852562 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:08:54 crc kubenswrapper[4885]: I0130 00:08:54.852641 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.852746 4885 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.852830 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 00:08:55.852812356 +0000 UTC m=+22.444284094 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.853250 4885 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 00:08:54 crc kubenswrapper[4885]: E0130 00:08:54.853285 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 00:08:55.853277348 +0000 UTC m=+22.444749086 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.079627 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 00:26:33.870440466 +0000 UTC Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.290753 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"20eb79d8fe83a8a6f021854f73941f6f09b4b058d3bdf42f7b3928345d9309c2"} Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.292827 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a"} Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.292864 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae"} Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.292881 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"7fe9edeae8c586c27b89e46f644d1882976908c3b6724b5032e8b5116467d75d"} Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.295355 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d"} Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.295395 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"6db5f31b20653fc8992c74347cd82e80e7e3da9617cea28ced63b752928f4de1"} Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.298827 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.302118 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a"} Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.302159 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.317712 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:55Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.338695 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:55Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.353463 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:55Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.369789 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:55Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.386652 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30
T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:55Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.403403 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:55Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.419677 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:55Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.439143 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:55Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.464316 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\
"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 
genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:55Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.481919 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:55Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.531155 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:55Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.555170 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:55Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.577728 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:55Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.606480 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:55Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.629491 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:55Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.650149 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:55Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.659662 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.659707 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:08:55 crc kubenswrapper[4885]: E0130 00:08:55.659867 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 00:08:55 crc kubenswrapper[4885]: E0130 00:08:55.659891 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 00:08:55 crc kubenswrapper[4885]: E0130 00:08:55.659905 4885 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 00:08:55 crc kubenswrapper[4885]: E0130 00:08:55.659962 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 00:08:57.65994698 +0000 UTC m=+24.251418728 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 00:08:55 crc kubenswrapper[4885]: E0130 00:08:55.659992 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 00:08:55 crc kubenswrapper[4885]: E0130 00:08:55.660036 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 00:08:55 crc kubenswrapper[4885]: E0130 00:08:55.660060 4885 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 00:08:55 crc kubenswrapper[4885]: E0130 00:08:55.660163 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-30 00:08:57.660131405 +0000 UTC m=+24.251603173 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.760877 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:08:55 crc kubenswrapper[4885]: E0130 00:08:55.761082 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:08:57.761053786 +0000 UTC m=+24.352525544 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.862463 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:08:55 crc kubenswrapper[4885]: I0130 00:08:55.862869 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:08:55 crc kubenswrapper[4885]: E0130 00:08:55.863232 4885 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 00:08:55 crc kubenswrapper[4885]: E0130 00:08:55.863257 4885 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 00:08:55 crc kubenswrapper[4885]: E0130 00:08:55.863341 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 00:08:57.863309101 +0000 UTC m=+24.454780849 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 00:08:55 crc kubenswrapper[4885]: E0130 00:08:55.863367 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 00:08:57.863360293 +0000 UTC m=+24.454832041 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.080717 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 02:51:04.089165408 +0000 UTC Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.141307 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:08:56 crc kubenswrapper[4885]: E0130 00:08:56.141502 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.141652 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:08:56 crc kubenswrapper[4885]: E0130 00:08:56.142097 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.142224 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:08:56 crc kubenswrapper[4885]: E0130 00:08:56.142328 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.146910 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.147753 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.149118 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.150121 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.151030 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.151803 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.152721 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.153644 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.154390 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.157223 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.157818 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.159174 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.159716 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.160318 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.163363 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.164001 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.165196 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.165643 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.166327 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.167473 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.168090 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.169198 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.169695 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.174119 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.175163 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.175929 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.177443 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.178056 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" 
path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.179297 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.179950 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.180829 4885 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.180931 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.182475 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.183039 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.183831 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.185249 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.185899 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.186728 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.187393 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.188392 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.189082 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.190003 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" 
path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.190566 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.191564 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.192021 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.192890 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.193357 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.194395 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.194885 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.195646 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.196128 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.197020 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.197548 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.198005 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.555437 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.575678 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:56Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.576397 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.576639 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.597919 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready 
status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:56Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.614163 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:56Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.629983 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:56Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.650035 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:56Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.664815 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:56Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.678752 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:56Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.694620 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:56Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.731395 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:56Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.753924 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:56Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.776187 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:56Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.791319 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:56Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.812128 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\
\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b2387071035bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a291
6a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:56Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.826329 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:56Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.845573 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:56Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.862249 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:56Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:56 crc kubenswrapper[4885]: I0130 00:08:56.879077 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"
running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:56Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:57 crc kubenswrapper[4885]: I0130 00:08:57.081522 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 03:45:25.442468284 +0000 UTC Jan 30 00:08:57 crc kubenswrapper[4885]: I0130 00:08:57.680973 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:08:57 crc kubenswrapper[4885]: I0130 00:08:57.681039 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: 
\"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:08:57 crc kubenswrapper[4885]: E0130 00:08:57.681253 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 00:08:57 crc kubenswrapper[4885]: E0130 00:08:57.681283 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 00:08:57 crc kubenswrapper[4885]: E0130 00:08:57.681308 4885 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 00:08:57 crc kubenswrapper[4885]: E0130 00:08:57.681358 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 00:08:57 crc kubenswrapper[4885]: E0130 00:08:57.681402 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 00:09:01.681378994 +0000 UTC m=+28.272850782 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 00:08:57 crc kubenswrapper[4885]: E0130 00:08:57.681424 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 00:08:57 crc kubenswrapper[4885]: E0130 00:08:57.681494 4885 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 00:08:57 crc kubenswrapper[4885]: E0130 00:08:57.681603 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-30 00:09:01.681569179 +0000 UTC m=+28.273041117 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 00:08:57 crc kubenswrapper[4885]: I0130 00:08:57.781683 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:08:57 crc kubenswrapper[4885]: E0130 00:08:57.782067 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:09:01.782009818 +0000 UTC m=+28.373481606 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:08:57 crc kubenswrapper[4885]: I0130 00:08:57.883583 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:08:57 crc kubenswrapper[4885]: I0130 00:08:57.883740 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:08:57 crc kubenswrapper[4885]: E0130 00:08:57.883990 4885 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 00:08:57 crc kubenswrapper[4885]: E0130 00:08:57.884189 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 00:09:01.884154111 +0000 UTC m=+28.475625899 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 00:08:57 crc kubenswrapper[4885]: E0130 00:08:57.885122 4885 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 00:08:57 crc kubenswrapper[4885]: E0130 00:08:57.885259 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 00:09:01.885228189 +0000 UTC m=+28.476700077 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.081705 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 10:14:24.317019559 +0000 UTC Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.141842 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.141956 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:08:58 crc kubenswrapper[4885]: E0130 00:08:58.142087 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.142174 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:08:58 crc kubenswrapper[4885]: E0130 00:08:58.142292 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:08:58 crc kubenswrapper[4885]: E0130 00:08:58.142486 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.313461 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0"} Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.334946 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\
\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:58Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.359527 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:58Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.381513 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:58Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.404664 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:58Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.442056 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\
\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b2387071035bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a291
6a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:58Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.465700 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:58Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.484626 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:58Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.501443 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:58Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.517260 4885 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c31181
6f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:58Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.861710 4885 csr.go:261] certificate signing request csr-n25r4 is approved, waiting to be issued Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.870648 4885 csr.go:257] certificate signing request csr-n25r4 is issued Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.976919 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-zrbl7"] Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.977243 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-4t96d"] Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.977475 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-4t96d" Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.977885 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-zrbl7" Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.979327 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.979874 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.980004 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.981095 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.981317 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.982096 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 30 00:08:58 crc kubenswrapper[4885]: I0130 00:08:58.990005 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.002944 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:58Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.024162 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.034812 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.046572 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.064631 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.082276 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 03:23:19.175860752 +0000 UTC Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.085050 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.096032 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vg7zc\" (UniqueName: \"kubernetes.io/projected/a7dcff61-ca91-42c4-83dc-2a502099dff1-kube-api-access-vg7zc\") pod \"node-resolver-4t96d\" (UID: \"a7dcff61-ca91-42c4-83dc-2a502099dff1\") " pod="openshift-dns/node-resolver-4t96d" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.096072 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/f93c3da6-7e48-4079-9673-455594d63c9b-serviceca\") pod \"node-ca-zrbl7\" (UID: \"f93c3da6-7e48-4079-9673-455594d63c9b\") " pod="openshift-image-registry/node-ca-zrbl7" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.096090 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f93c3da6-7e48-4079-9673-455594d63c9b-host\") pod 
\"node-ca-zrbl7\" (UID: \"f93c3da6-7e48-4079-9673-455594d63c9b\") " pod="openshift-image-registry/node-ca-zrbl7" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.096114 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gn6dq\" (UniqueName: \"kubernetes.io/projected/f93c3da6-7e48-4079-9673-455594d63c9b-kube-api-access-gn6dq\") pod \"node-ca-zrbl7\" (UID: \"f93c3da6-7e48-4079-9673-455594d63c9b\") " pod="openshift-image-registry/node-ca-zrbl7" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.096132 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/a7dcff61-ca91-42c4-83dc-2a502099dff1-hosts-file\") pod \"node-resolver-4t96d\" (UID: \"a7dcff61-ca91-42c4-83dc-2a502099dff1\") " pod="openshift-dns/node-resolver-4t96d" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.107346 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":
\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b2387071035bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00
:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.120935 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.134442 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.146941 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.161956 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.181446 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc
-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.196967 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/a7dcff61-ca91-42c4-83dc-2a502099dff1-hosts-file\") pod \"node-resolver-4t96d\" (UID: \"a7dcff61-ca91-42c4-83dc-2a502099dff1\") " pod="openshift-dns/node-resolver-4t96d" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.197030 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vg7zc\" (UniqueName: \"kubernetes.io/projected/a7dcff61-ca91-42c4-83dc-2a502099dff1-kube-api-access-vg7zc\") pod \"node-resolver-4t96d\" (UID: \"a7dcff61-ca91-42c4-83dc-2a502099dff1\") " pod="openshift-dns/node-resolver-4t96d" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.197081 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/f93c3da6-7e48-4079-9673-455594d63c9b-serviceca\") pod \"node-ca-zrbl7\" (UID: \"f93c3da6-7e48-4079-9673-455594d63c9b\") " pod="openshift-image-registry/node-ca-zrbl7" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.197098 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: 
\"kubernetes.io/host-path/f93c3da6-7e48-4079-9673-455594d63c9b-host\") pod \"node-ca-zrbl7\" (UID: \"f93c3da6-7e48-4079-9673-455594d63c9b\") " pod="openshift-image-registry/node-ca-zrbl7" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.197201 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/a7dcff61-ca91-42c4-83dc-2a502099dff1-hosts-file\") pod \"node-resolver-4t96d\" (UID: \"a7dcff61-ca91-42c4-83dc-2a502099dff1\") " pod="openshift-dns/node-resolver-4t96d" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.197410 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f93c3da6-7e48-4079-9673-455594d63c9b-host\") pod \"node-ca-zrbl7\" (UID: \"f93c3da6-7e48-4079-9673-455594d63c9b\") " pod="openshift-image-registry/node-ca-zrbl7" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.197480 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gn6dq\" (UniqueName: \"kubernetes.io/projected/f93c3da6-7e48-4079-9673-455594d63c9b-kube-api-access-gn6dq\") pod \"node-ca-zrbl7\" (UID: \"f93c3da6-7e48-4079-9673-455594d63c9b\") " pod="openshift-image-registry/node-ca-zrbl7" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.201012 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/f93c3da6-7e48-4079-9673-455594d63c9b-serviceca\") pod \"node-ca-zrbl7\" (UID: \"f93c3da6-7e48-4079-9673-455594d63c9b\") " pod="openshift-image-registry/node-ca-zrbl7" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.201206 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.229963 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.238375 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gn6dq\" (UniqueName: \"kubernetes.io/projected/f93c3da6-7e48-4079-9673-455594d63c9b-kube-api-access-gn6dq\") pod \"node-ca-zrbl7\" (UID: \"f93c3da6-7e48-4079-9673-455594d63c9b\") " pod="openshift-image-registry/node-ca-zrbl7" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.242234 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vg7zc\" (UniqueName: \"kubernetes.io/projected/a7dcff61-ca91-42c4-83dc-2a502099dff1-kube-api-access-vg7zc\") pod \"node-resolver-4t96d\" (UID: \"a7dcff61-ca91-42c4-83dc-2a502099dff1\") " pod="openshift-dns/node-resolver-4t96d" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.253844 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.273780 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.297185 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-zrbl7" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.299219 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-4t96d" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.308244 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b238707103
5bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.321430 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-zrbl7" event={"ID":"f93c3da6-7e48-4079-9673-455594d63c9b","Type":"ContainerStarted","Data":"af7d30f8ecd49dfa43fb367c922e6f87746f0dd182b4c3176edfff0c50826346"} Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.332101 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.349256 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.376255 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.390750 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.407819 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.408213 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-xmv9h"] Jan 30 00:08:59 crc 
kubenswrapper[4885]: I0130 00:08:59.408520 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-xmv9h"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.415920 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.416054 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.416149 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.416177 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-hwpvs"]
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.416302 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.416683 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.418383 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-bmd5j"]
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.418788 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-72hlw"]
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.419198 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.419464 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.420160 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-72hlw"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.424072 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.424348 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.424538 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.424675 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.424734 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.424806 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.424910 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.425645 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.425842 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.426060 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.426287 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.426479 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.426592 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.426644 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.436791 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b238707103
5bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.458871 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.483353 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired 
or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.499889 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.509445 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-etc-kubernetes\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.509484 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-os-release\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.509501 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" 
(UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-hostroot\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.509518 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-host-run-multus-certs\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.509537 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-cnibin\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.509556 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3f11e547-11fd-417a-be4a-e4f37d8e7839-cni-binary-copy\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.509650 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-multus-socket-dir-parent\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.509697 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-host-var-lib-kubelet\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.509880 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3f11e547-11fd-417a-be4a-e4f37d8e7839-multus-daemon-config\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.509903 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-system-cni-dir\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.509921 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-host-run-netns\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.509936 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-host-var-lib-cni-bin\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.509963 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-host-var-lib-cni-multus\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.509981 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-multus-conf-dir\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.509997 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-multus-cni-dir\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.510013 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-host-run-k8s-cni-cncf-io\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.510030 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gxj7\" (UniqueName: \"kubernetes.io/projected/3f11e547-11fd-417a-be4a-e4f37d8e7839-kube-api-access-5gxj7\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.512074 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.532801 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.551923 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.569173 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.589687 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.611371 4885 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.611527 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-multus-cni-dir\") pod \"multus-xmv9h\" (UID: 
\"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.611580 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-run-systemd\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.611605 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5pbd\" (UniqueName: \"kubernetes.io/projected/b01f0fb2-4c71-437b-9ac2-5ca44830f3a5-kube-api-access-s5pbd\") pod \"multus-additional-cni-plugins-72hlw\" (UID: \"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\") " pod="openshift-multus/multus-additional-cni-plugins-72hlw" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.611642 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-etc-kubernetes\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.611664 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-kubelet\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.611683 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-run-ovn-kubernetes\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.611714 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-run-openvswitch\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.611733 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-multus-socket-dir-parent\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.611761 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b01f0fb2-4c71-437b-9ac2-5ca44830f3a5-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-72hlw\" (UID: \"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\") " pod="openshift-multus/multus-additional-cni-plugins-72hlw" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.611796 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-run-netns\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.611812 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-var-lib-openvswitch\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.611829 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhwkm\" (UniqueName: \"kubernetes.io/projected/147e5e96-db98-498f-b4a4-927d73cb5db5-kube-api-access-dhwkm\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.611848 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b01f0fb2-4c71-437b-9ac2-5ca44830f3a5-os-release\") pod \"multus-additional-cni-plugins-72hlw\" (UID: \"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\") " pod="openshift-multus/multus-additional-cni-plugins-72hlw" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.611868 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-slash\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.611886 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b01f0fb2-4c71-437b-9ac2-5ca44830f3a5-cni-binary-copy\") pod \"multus-additional-cni-plugins-72hlw\" (UID: \"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\") " pod="openshift-multus/multus-additional-cni-plugins-72hlw" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.611907 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/41b99e9c-eadb-404c-9596-1b102ac85157-mcd-auth-proxy-config\") pod \"machine-config-daemon-bmd5j\" (UID: \"41b99e9c-eadb-404c-9596-1b102ac85157\") " pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.611934 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-system-cni-dir\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.611957 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-host-run-netns\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.611978 4885 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-host-var-lib-cni-bin\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.611999 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3f11e547-11fd-417a-be4a-e4f37d8e7839-multus-daemon-config\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612014 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-multus-socket-dir-parent\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612021 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-cni-bin\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612099 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gxj7\" (UniqueName: \"kubernetes.io/projected/3f11e547-11fd-417a-be4a-e4f37d8e7839-kube-api-access-5gxj7\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612109 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-etc-kubernetes\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612131 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-run-ovn\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612124 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-host-run-netns\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612149 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-host-var-lib-cni-bin\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612158 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-cni-netd\") 
pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612113 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612221 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-multus-cni-dir\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612336 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612368 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/147e5e96-db98-498f-b4a4-927d73cb5db5-ovnkube-config\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612408 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-host-run-k8s-cni-cncf-io\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612412 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-system-cni-dir\") pod 
\"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612452 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-host-run-k8s-cni-cncf-io\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612466 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-hostroot\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612492 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-hostroot\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612505 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/147e5e96-db98-498f-b4a4-927d73cb5db5-ovnkube-script-lib\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612539 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b01f0fb2-4c71-437b-9ac2-5ca44830f3a5-tuning-conf-dir\") pod \"multus-additional-cni-plugins-72hlw\" (UID: \"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\") " pod="openshift-multus/multus-additional-cni-plugins-72hlw" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612573 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-os-release\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612612 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-etc-openvswitch\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612682 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-node-log\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612723 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/147e5e96-db98-498f-b4a4-927d73cb5db5-env-overrides\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612758 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/41b99e9c-eadb-404c-9596-1b102ac85157-proxy-tls\") pod \"machine-config-daemon-bmd5j\" (UID: \"41b99e9c-eadb-404c-9596-1b102ac85157\") " pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612812 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-os-release\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612828 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-cnibin\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612863 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3f11e547-11fd-417a-be4a-e4f37d8e7839-cni-binary-copy\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612832 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3f11e547-11fd-417a-be4a-e4f37d8e7839-multus-daemon-config\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612896 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-host-var-lib-kubelet\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612923 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-host-run-multus-certs\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612955 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b01f0fb2-4c71-437b-9ac2-5ca44830f3a5-system-cni-dir\") pod \"multus-additional-cni-plugins-72hlw\" (UID: \"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\") " pod="openshift-multus/multus-additional-cni-plugins-72hlw" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612957 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-host-run-multus-certs\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612988 4885 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b01f0fb2-4c71-437b-9ac2-5ca44830f3a5-cnibin\") pod \"multus-additional-cni-plugins-72hlw\" (UID: \"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\") " pod="openshift-multus/multus-additional-cni-plugins-72hlw" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612901 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-cnibin\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.612925 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-host-var-lib-kubelet\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.613015 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/41b99e9c-eadb-404c-9596-1b102ac85157-rootfs\") pod \"machine-config-daemon-bmd5j\" (UID: \"41b99e9c-eadb-404c-9596-1b102ac85157\") " pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.613062 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nq7jl\" (UniqueName: \"kubernetes.io/projected/41b99e9c-eadb-404c-9596-1b102ac85157-kube-api-access-nq7jl\") pod \"machine-config-daemon-bmd5j\" (UID: \"41b99e9c-eadb-404c-9596-1b102ac85157\") " pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.613095 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-systemd-units\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.613124 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-log-socket\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.613158 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-host-var-lib-cni-multus\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.613179 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-multus-conf-dir\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.613201 4885 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/147e5e96-db98-498f-b4a4-927d73cb5db5-ovn-node-metrics-cert\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.613293 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-host-var-lib-cni-multus\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.613319 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3f11e547-11fd-417a-be4a-e4f37d8e7839-multus-conf-dir\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.613555 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3f11e547-11fd-417a-be4a-e4f37d8e7839-cni-binary-copy\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.614549 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.614594 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.614606 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.614784 4885 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.621584 4885 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.621933 4885 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.623192 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.623236 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.623251 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.623268 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.623280 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:08:59Z","lastTransitionTime":"2026-01-30T00:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.632823 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.634386 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gxj7\" (UniqueName: \"kubernetes.io/projected/3f11e547-11fd-417a-be4a-e4f37d8e7839-kube-api-access-5gxj7\") pod \"multus-xmv9h\" (UID: \"3f11e547-11fd-417a-be4a-e4f37d8e7839\") " pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.648975 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: E0130 00:08:59.649170 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056
b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951
},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.653245 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.653295 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.653308 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.653329 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.653342 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:08:59Z","lastTransitionTime":"2026-01-30T00:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.667055 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a
8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: E0130 00:08:59.668006 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.680799 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.680851 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.680861 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.680879 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.680891 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:08:59Z","lastTransitionTime":"2026-01-30T00:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.703181 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: E0130 00:08:59.709955 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"0
3cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714186 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b01f0fb2-4c71-437b-9ac2-5ca44830f3a5-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-72hlw\" (UID: \"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\") " pod="openshift-multus/multus-additional-cni-plugins-72hlw" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714228 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-run-netns\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714245 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-var-lib-openvswitch\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714269 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhwkm\" (UniqueName: \"kubernetes.io/projected/147e5e96-db98-498f-b4a4-927d73cb5db5-kube-api-access-dhwkm\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714284 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b01f0fb2-4c71-437b-9ac2-5ca44830f3a5-os-release\") pod \"multus-additional-cni-plugins-72hlw\" (UID: \"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\") " pod="openshift-multus/multus-additional-cni-plugins-72hlw" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714327 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-slash\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714348 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b01f0fb2-4c71-437b-9ac2-5ca44830f3a5-cni-binary-copy\") pod \"multus-additional-cni-plugins-72hlw\" (UID: \"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\") " pod="openshift-multus/multus-additional-cni-plugins-72hlw" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714368 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/41b99e9c-eadb-404c-9596-1b102ac85157-mcd-auth-proxy-config\") pod \"machine-config-daemon-bmd5j\" (UID: \"41b99e9c-eadb-404c-9596-1b102ac85157\") " 
pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714368 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-run-netns\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714455 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-slash\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714393 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-cni-bin\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714451 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-var-lib-openvswitch\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714496 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b01f0fb2-4c71-437b-9ac2-5ca44830f3a5-os-release\") pod \"multus-additional-cni-plugins-72hlw\" (UID: \"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\") " pod="openshift-multus/multus-additional-cni-plugins-72hlw" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714493 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/147e5e96-db98-498f-b4a4-927d73cb5db5-ovnkube-config\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714528 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-cni-bin\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714695 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-run-ovn\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714725 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-cni-netd\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714755 4885 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714802 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/147e5e96-db98-498f-b4a4-927d73cb5db5-ovnkube-script-lib\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714823 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b01f0fb2-4c71-437b-9ac2-5ca44830f3a5-tuning-conf-dir\") pod \"multus-additional-cni-plugins-72hlw\" (UID: \"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\") " pod="openshift-multus/multus-additional-cni-plugins-72hlw" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714822 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-run-ovn\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714845 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-etc-openvswitch\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714874 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-etc-openvswitch\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714888 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-node-log\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714909 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-cni-netd\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714910 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/147e5e96-db98-498f-b4a4-927d73cb5db5-env-overrides\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714945 4885 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/41b99e9c-eadb-404c-9596-1b102ac85157-proxy-tls\") pod \"machine-config-daemon-bmd5j\" (UID: \"41b99e9c-eadb-404c-9596-1b102ac85157\") " pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714967 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b01f0fb2-4c71-437b-9ac2-5ca44830f3a5-system-cni-dir\") pod \"multus-additional-cni-plugins-72hlw\" (UID: \"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\") " pod="openshift-multus/multus-additional-cni-plugins-72hlw" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.714986 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b01f0fb2-4c71-437b-9ac2-5ca44830f3a5-cnibin\") pod \"multus-additional-cni-plugins-72hlw\" (UID: \"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\") " pod="openshift-multus/multus-additional-cni-plugins-72hlw" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.715009 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/41b99e9c-eadb-404c-9596-1b102ac85157-rootfs\") pod \"machine-config-daemon-bmd5j\" (UID: \"41b99e9c-eadb-404c-9596-1b102ac85157\") " pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.715038 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nq7jl\" (UniqueName: \"kubernetes.io/projected/41b99e9c-eadb-404c-9596-1b102ac85157-kube-api-access-nq7jl\") pod \"machine-config-daemon-bmd5j\" (UID: \"41b99e9c-eadb-404c-9596-1b102ac85157\") " pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.715091 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-systemd-units\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.715114 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-log-socket\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.715304 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/41b99e9c-eadb-404c-9596-1b102ac85157-mcd-auth-proxy-config\") pod \"machine-config-daemon-bmd5j\" (UID: \"41b99e9c-eadb-404c-9596-1b102ac85157\") " pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.715322 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/147e5e96-db98-498f-b4a4-927d73cb5db5-ovnkube-config\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.715355 4885 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/147e5e96-db98-498f-b4a4-927d73cb5db5-env-overrides\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.715365 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-node-log\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.715372 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b01f0fb2-4c71-437b-9ac2-5ca44830f3a5-system-cni-dir\") pod \"multus-additional-cni-plugins-72hlw\" (UID: \"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\") " pod="openshift-multus/multus-additional-cni-plugins-72hlw" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.715419 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/41b99e9c-eadb-404c-9596-1b102ac85157-rootfs\") pod \"machine-config-daemon-bmd5j\" (UID: \"41b99e9c-eadb-404c-9596-1b102ac85157\") " pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.715419 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b01f0fb2-4c71-437b-9ac2-5ca44830f3a5-cni-binary-copy\") pod \"multus-additional-cni-plugins-72hlw\" (UID: \"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\") " pod="openshift-multus/multus-additional-cni-plugins-72hlw" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.715463 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b01f0fb2-4c71-437b-9ac2-5ca44830f3a5-cnibin\") pod \"multus-additional-cni-plugins-72hlw\" (UID: \"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\") " pod="openshift-multus/multus-additional-cni-plugins-72hlw" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.715488 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/147e5e96-db98-498f-b4a4-927d73cb5db5-ovn-node-metrics-cert\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.715497 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.715527 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-systemd-units\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.715530 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-run-systemd\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.715561 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-run-systemd\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.715851 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/147e5e96-db98-498f-b4a4-927d73cb5db5-ovnkube-script-lib\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.715905 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-log-socket\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.715560 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5pbd\" (UniqueName: \"kubernetes.io/projected/b01f0fb2-4c71-437b-9ac2-5ca44830f3a5-kube-api-access-s5pbd\") pod \"multus-additional-cni-plugins-72hlw\" (UID: \"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\") " pod="openshift-multus/multus-additional-cni-plugins-72hlw" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.715983 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-kubelet\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.716010 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-run-ovn-kubernetes\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.716019 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-kubelet\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.716032 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-run-openvswitch\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.716063 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-run-ovn-kubernetes\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.716068 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-run-openvswitch\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.716032 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b01f0fb2-4c71-437b-9ac2-5ca44830f3a5-tuning-conf-dir\") pod \"multus-additional-cni-plugins-72hlw\" (UID: \"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\") " pod="openshift-multus/multus-additional-cni-plugins-72hlw" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.716424 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b01f0fb2-4c71-437b-9ac2-5ca44830f3a5-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-72hlw\" (UID: \"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\") " pod="openshift-multus/multus-additional-cni-plugins-72hlw" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.720192 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/41b99e9c-eadb-404c-9596-1b102ac85157-proxy-tls\") pod \"machine-config-daemon-bmd5j\" (UID: \"41b99e9c-eadb-404c-9596-1b102ac85157\") " pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.720382 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.720397 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/147e5e96-db98-498f-b4a4-927d73cb5db5-ovn-node-metrics-cert\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.720406 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.720448 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.720467 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.720477 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:08:59Z","lastTransitionTime":"2026-01-30T00:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.730134 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-xmv9h" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.741742 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.749843 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nq7jl\" (UniqueName: \"kubernetes.io/projected/41b99e9c-eadb-404c-9596-1b102ac85157-kube-api-access-nq7jl\") pod \"machine-config-daemon-bmd5j\" (UID: \"41b99e9c-eadb-404c-9596-1b102ac85157\") " pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.751089 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.752229 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5pbd\" (UniqueName: \"kubernetes.io/projected/b01f0fb2-4c71-437b-9ac2-5ca44830f3a5-kube-api-access-s5pbd\") pod \"multus-additional-cni-plugins-72hlw\" (UID: \"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\") " pod="openshift-multus/multus-additional-cni-plugins-72hlw" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.756300 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-72hlw" Jan 30 00:08:59 crc kubenswrapper[4885]: E0130 00:08:59.770184 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: W0130 00:08:59.770306 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod41b99e9c_eadb_404c_9596_1b102ac85157.slice/crio-a684857d176a71ff76af3e48dc28260547557230f7f369f5e319d2b4ea43a89c WatchSource:0}: Error finding 
container a684857d176a71ff76af3e48dc28260547557230f7f369f5e319d2b4ea43a89c: Status 404 returned error can't find the container with id a684857d176a71ff76af3e48dc28260547557230f7f369f5e319d2b4ea43a89c Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.780309 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhwkm\" (UniqueName: \"kubernetes.io/projected/147e5e96-db98-498f-b4a4-927d73cb5db5-kube-api-access-dhwkm\") pod \"ovnkube-node-hwpvs\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.782126 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.782167 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.782177 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.782192 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.782202 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:08:59Z","lastTransitionTime":"2026-01-30T00:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.797060 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b238707103
5bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.828061 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: E0130 00:08:59.828151 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: E0130 00:08:59.828259 4885 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.835982 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.836021 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.836031 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.836048 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.836059 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:08:59Z","lastTransitionTime":"2026-01-30T00:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.856049 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.872610 4885 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-30 00:03:58 +0000 UTC, rotation deadline is 2026-10-13 16:20:10.01503969 +0000 UTC Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.872676 4885 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6160h11m10.142367226s for next certificate rotation Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.892670 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.913566 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.927305 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.937902 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.937947 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.937960 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.937977 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.937990 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:08:59Z","lastTransitionTime":"2026-01-30T00:08:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.946325 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.960117 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.978071 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:08:59 crc kubenswrapper[4885]: I0130 00:08:59.995002 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:08:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.014050 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.041181 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.041234 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.041247 4885 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.041267 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.041281 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:00Z","lastTransitionTime":"2026-01-30T00:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.042156 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.043627 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:09:00 crc kubenswrapper[4885]: W0130 00:09:00.060179 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod147e5e96_db98_498f_b4a4_927d73cb5db5.slice/crio-f4f9113690925a02f48604f583a736200e31613e3fe941f9675bfdcdfaa4cad3 WatchSource:0}: Error finding container f4f9113690925a02f48604f583a736200e31613e3fe941f9675bfdcdfaa4cad3: Status 404 returned error can't find the container with id f4f9113690925a02f48604f583a736200e31613e3fe941f9675bfdcdfaa4cad3 Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.083404 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 02:57:28.669881084 +0000 UTC Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.141072 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.141201 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.141081 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:00 crc kubenswrapper[4885]: E0130 00:09:00.141217 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:09:00 crc kubenswrapper[4885]: E0130 00:09:00.141379 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:09:00 crc kubenswrapper[4885]: E0130 00:09:00.141462 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.143785 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.143840 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.143854 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.143870 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.143881 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:00Z","lastTransitionTime":"2026-01-30T00:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.247116 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.247166 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.247177 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.247194 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.247205 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:00Z","lastTransitionTime":"2026-01-30T00:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.333524 4885 generic.go:334] "Generic (PLEG): container finished" podID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerID="2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc" exitCode=0 Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.333617 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerDied","Data":"2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc"} Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.333648 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerStarted","Data":"f4f9113690925a02f48604f583a736200e31613e3fe941f9675bfdcdfaa4cad3"} Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.338855 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" event={"ID":"41b99e9c-eadb-404c-9596-1b102ac85157","Type":"ContainerStarted","Data":"6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1"} Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.338911 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" event={"ID":"41b99e9c-eadb-404c-9596-1b102ac85157","Type":"ContainerStarted","Data":"f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339"} Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.338935 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" event={"ID":"41b99e9c-eadb-404c-9596-1b102ac85157","Type":"ContainerStarted","Data":"a684857d176a71ff76af3e48dc28260547557230f7f369f5e319d2b4ea43a89c"} Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.340385 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" event={"ID":"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5","Type":"ContainerStarted","Data":"8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b"} Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.340413 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" event={"ID":"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5","Type":"ContainerStarted","Data":"7b4af2e43789963a46ca2ca6edc4038b8fe8313896f49cdd2b15414360f61d88"} Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.343465 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xmv9h" event={"ID":"3f11e547-11fd-417a-be4a-e4f37d8e7839","Type":"ContainerStarted","Data":"f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758"} Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.343510 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xmv9h" event={"ID":"3f11e547-11fd-417a-be4a-e4f37d8e7839","Type":"ContainerStarted","Data":"8f47792b8d5ea71dd9f109a5c0320dc696e23dcaea8371cb7251e15c39d96acf"} Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.346292 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-4t96d" event={"ID":"a7dcff61-ca91-42c4-83dc-2a502099dff1","Type":"ContainerStarted","Data":"d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a"} Jan 30 00:09:00 crc 
kubenswrapper[4885]: I0130 00:09:00.346351 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-4t96d" event={"ID":"a7dcff61-ca91-42c4-83dc-2a502099dff1","Type":"ContainerStarted","Data":"02909774fedabd634673c60182df58c5c97149ccd60cffc90b27cd4f5c5f9b59"} Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.348627 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-zrbl7" event={"ID":"f93c3da6-7e48-4079-9673-455594d63c9b","Type":"ContainerStarted","Data":"c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d"} Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.348932 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.348961 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.348970 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.348981 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.348994 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:00Z","lastTransitionTime":"2026-01-30T00:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.354263 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.367529 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.380521 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.405277 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b2387071035bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.426240 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.444720 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.451741 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.451841 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.451860 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.451885 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.451906 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:00Z","lastTransitionTime":"2026-01-30T00:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.457890 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.473197 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.489479 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPa
th\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"
192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.500997 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.515886 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.542612 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.554573 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.554615 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.554629 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.554649 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.554682 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:00Z","lastTransitionTime":"2026-01-30T00:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.560672 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.582086 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.595813 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.613112 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.624459 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.647183 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b238707103
5bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.657580 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.657622 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.657632 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.657647 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.657657 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:00Z","lastTransitionTime":"2026-01-30T00:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.662176 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.697127 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z 
is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.714220 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.730594 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.750561 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.760989 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.761038 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.761048 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.761064 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.761075 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:00Z","lastTransitionTime":"2026-01-30T00:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.764978 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.783221 4885 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6
173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.802154 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.817117 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.833994 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.849112 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.863886 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.863963 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.863978 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.864003 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.864017 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:00Z","lastTransitionTime":"2026-01-30T00:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.866596 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.969565 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.970011 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.970028 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.970052 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:00 crc kubenswrapper[4885]: I0130 00:09:00.970065 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:00Z","lastTransitionTime":"2026-01-30T00:09:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.075522 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.075567 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.075588 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.075606 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.075619 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:01Z","lastTransitionTime":"2026-01-30T00:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.084294 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 12:38:51.270633131 +0000 UTC Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.179698 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.179763 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.179819 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.179844 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.179866 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:01Z","lastTransitionTime":"2026-01-30T00:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.282745 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.282828 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.282875 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.282902 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.282922 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:01Z","lastTransitionTime":"2026-01-30T00:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.354892 4885 generic.go:334] "Generic (PLEG): container finished" podID="b01f0fb2-4c71-437b-9ac2-5ca44830f3a5" containerID="8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b" exitCode=0 Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.355004 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" event={"ID":"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5","Type":"ContainerDied","Data":"8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b"} Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.364977 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerStarted","Data":"ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5"} Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.365047 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerStarted","Data":"a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519"} Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.365068 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerStarted","Data":"5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b"} Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.365087 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerStarted","Data":"b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24"} Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.365105 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerStarted","Data":"e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8"} Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.385718 4885 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c31181
6f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.390450 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.390495 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.390514 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.390542 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.390560 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:01Z","lastTransitionTime":"2026-01-30T00:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.398847 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.413480 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.434830 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b238707103
5bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.449818 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.468318 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.485428 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.495501 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.495576 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.495600 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.495635 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.495659 4885 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:01Z","lastTransitionTime":"2026-01-30T00:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.497588 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.511427 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.567716 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.604027 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.604086 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.604100 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.604124 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.604138 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:01Z","lastTransitionTime":"2026-01-30T00:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.614566 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.638447 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.654734 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.677813 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.695415 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c85
7df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.707658 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.707723 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.707741 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.707778 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.707795 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:01Z","lastTransitionTime":"2026-01-30T00:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.736275 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.736324 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:09:01 crc kubenswrapper[4885]: E0130 00:09:01.736516 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 00:09:01 crc kubenswrapper[4885]: E0130 00:09:01.736537 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 00:09:01 crc kubenswrapper[4885]: E0130 00:09:01.736550 4885 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 00:09:01 crc kubenswrapper[4885]: E0130 00:09:01.736609 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 00:09:09.736591362 +0000 UTC m=+36.328063110 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 00:09:01 crc kubenswrapper[4885]: E0130 00:09:01.736941 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 00:09:01 crc kubenswrapper[4885]: E0130 00:09:01.736979 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 00:09:01 crc kubenswrapper[4885]: E0130 00:09:01.736994 4885 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 00:09:01 crc kubenswrapper[4885]: E0130 00:09:01.737070 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-30 00:09:09.737046764 +0000 UTC m=+36.328518512 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.810862 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.810930 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.810943 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.810967 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.810987 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:01Z","lastTransitionTime":"2026-01-30T00:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.837349 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:09:01 crc kubenswrapper[4885]: E0130 00:09:01.837577 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:09:09.837530794 +0000 UTC m=+36.429002582 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.913409 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.913787 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.913796 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.913808 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.913817 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:01Z","lastTransitionTime":"2026-01-30T00:09:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.939438 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:01 crc kubenswrapper[4885]: I0130 00:09:01.939868 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:01 crc kubenswrapper[4885]: E0130 00:09:01.939791 4885 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 00:09:01 crc kubenswrapper[4885]: E0130 00:09:01.940233 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 00:09:09.940216091 +0000 UTC m=+36.531687839 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 00:09:01 crc kubenswrapper[4885]: E0130 00:09:01.940039 4885 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 00:09:01 crc kubenswrapper[4885]: E0130 00:09:01.940423 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 00:09:09.940415206 +0000 UTC m=+36.531886954 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.017683 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.017850 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.017915 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.018000 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.018054 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:02Z","lastTransitionTime":"2026-01-30T00:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.085114 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 18:43:28.209575797 +0000 UTC Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.121495 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.121751 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.121886 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.121968 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.122040 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:02Z","lastTransitionTime":"2026-01-30T00:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.142971 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:09:02 crc kubenswrapper[4885]: E0130 00:09:02.143116 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.143276 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.143298 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:02 crc kubenswrapper[4885]: E0130 00:09:02.143539 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:09:02 crc kubenswrapper[4885]: E0130 00:09:02.143800 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.225554 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.225909 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.226013 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.226106 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.226196 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:02Z","lastTransitionTime":"2026-01-30T00:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.329189 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.329459 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.329544 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.329647 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.329727 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:02Z","lastTransitionTime":"2026-01-30T00:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.370347 4885 generic.go:334] "Generic (PLEG): container finished" podID="b01f0fb2-4c71-437b-9ac2-5ca44830f3a5" containerID="178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590" exitCode=0 Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.370414 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" event={"ID":"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5","Type":"ContainerDied","Data":"178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590"} Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.378048 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerStarted","Data":"9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e"} Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.390950 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:02Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.410608 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:02Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.423678 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:02Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.437706 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.437739 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.437749 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.437780 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.437791 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:02Z","lastTransitionTime":"2026-01-30T00:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.451452 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b2387071035bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\
"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:02Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.463635 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:02Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.473198 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:02Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.486014 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:02Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.498446 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:02Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.514550 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:02Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.537210 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:02Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.539826 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.539910 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.539928 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.539954 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.539973 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:02Z","lastTransitionTime":"2026-01-30T00:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.554341 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:02Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.570618 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:02Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.586126 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:02Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.611614 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:02Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.627171 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"moun
tPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:02Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.643348 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.643424 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.643444 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.643471 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.643490 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:02Z","lastTransitionTime":"2026-01-30T00:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.747553 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.747624 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.747635 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.747655 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.747665 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:02Z","lastTransitionTime":"2026-01-30T00:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.851346 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.851398 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.851409 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.851426 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.851440 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:02Z","lastTransitionTime":"2026-01-30T00:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.955349 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.955502 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.955534 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.955622 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:02 crc kubenswrapper[4885]: I0130 00:09:02.955733 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:02Z","lastTransitionTime":"2026-01-30T00:09:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.060158 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.060622 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.060858 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.061035 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.061181 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:03Z","lastTransitionTime":"2026-01-30T00:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.086186 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 18:42:33.006062764 +0000 UTC Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.164372 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.164425 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.164447 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.164469 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.164487 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:03Z","lastTransitionTime":"2026-01-30T00:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.268046 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.268107 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.268126 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.268155 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.268174 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:03Z","lastTransitionTime":"2026-01-30T00:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.371324 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.371370 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.371385 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.371404 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.371414 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:03Z","lastTransitionTime":"2026-01-30T00:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.384989 4885 generic.go:334] "Generic (PLEG): container finished" podID="b01f0fb2-4c71-437b-9ac2-5ca44830f3a5" containerID="f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb" exitCode=0 Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.385038 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" event={"ID":"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5","Type":"ContainerDied","Data":"f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb"} Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.406564 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:03Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.423934 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:03Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.444151 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:03Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.465866 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:03Z 
is after 2025-08-24T17:21:41Z" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.473680 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.473727 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.473740 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.473758 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.473786 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:03Z","lastTransitionTime":"2026-01-30T00:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.482330 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699
a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:03Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.503215 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:03Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.536943 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:03Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.555723 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:03Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.572709 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:03Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.577022 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.577070 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.577082 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.577104 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.577121 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:03Z","lastTransitionTime":"2026-01-30T00:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.588700 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:03Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.610023 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:03Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.625163 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:03Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.647897 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b238707103
5bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:03Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.660235 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:03Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.672054 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:03Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.680349 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.680405 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.680416 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.680436 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.680452 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:03Z","lastTransitionTime":"2026-01-30T00:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.783861 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.783915 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.783928 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.783948 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.783966 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:03Z","lastTransitionTime":"2026-01-30T00:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.886519 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.886593 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.886611 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.886640 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.886660 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:03Z","lastTransitionTime":"2026-01-30T00:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.898628 4885 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.989545 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.989590 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.989602 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.989623 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:03 crc kubenswrapper[4885]: I0130 00:09:03.989637 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:03Z","lastTransitionTime":"2026-01-30T00:09:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.088926 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 17:01:15.413917673 +0000 UTC Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.093034 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.093096 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.093132 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.093161 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.093181 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:04Z","lastTransitionTime":"2026-01-30T00:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.142111 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.142315 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.142345 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:04 crc kubenswrapper[4885]: E0130 00:09:04.142761 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:09:04 crc kubenswrapper[4885]: E0130 00:09:04.142983 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:09:04 crc kubenswrapper[4885]: E0130 00:09:04.142871 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.172902 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b2387071035bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a
2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.196720 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.196798 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.196818 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.196844 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.196862 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:04Z","lastTransitionTime":"2026-01-30T00:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.203248 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.250216 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.269303 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.281068 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.293761 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.
d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.301241 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.301391 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.301450 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.301532 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.301597 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:04Z","lastTransitionTime":"2026-01-30T00:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.314653 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b
0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.325438 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\
\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.343725 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.362039 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.377148 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.397056 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerStarted","Data":"4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc"} Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.400072 4885 generic.go:334] "Generic (PLEG): container finished" 
podID="b01f0fb2-4c71-437b-9ac2-5ca44830f3a5" containerID="8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587" exitCode=0 Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.400122 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" event={"ID":"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5","Type":"ContainerDied","Data":"8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587"} Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.401441 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.403012 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.403043 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.403053 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.403064 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.403074 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:04Z","lastTransitionTime":"2026-01-30T00:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.418069 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.437756 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.458872 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.475095 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.491268 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.504918 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.506915 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.506956 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.506967 4885 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.506989 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.507003 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:04Z","lastTransitionTime":"2026-01-30T00:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.523158 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.537931 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de259712
6bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.556217 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.571947 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.603321 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b238707103
5bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.610913 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.610973 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.610986 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.611010 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.611027 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:04Z","lastTransitionTime":"2026-01-30T00:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.616535 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.629291 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.644727 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.659133 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.676731 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.705474 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.713817 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.713886 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.713901 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.713923 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.713936 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:04Z","lastTransitionTime":"2026-01-30T00:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.719887 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:04Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.817183 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.817228 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.817239 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.817257 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.817268 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:04Z","lastTransitionTime":"2026-01-30T00:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.921154 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.921205 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.921218 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.921236 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:04 crc kubenswrapper[4885]: I0130 00:09:04.921249 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:04Z","lastTransitionTime":"2026-01-30T00:09:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.025033 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.025372 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.025473 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.025593 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.025686 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:05Z","lastTransitionTime":"2026-01-30T00:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.089269 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 03:59:52.383042881 +0000 UTC
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.129085 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.129137 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.129150 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.129173 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.129189 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:05Z","lastTransitionTime":"2026-01-30T00:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.232237 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.232276 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.232285 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.232300 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.232312 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:05Z","lastTransitionTime":"2026-01-30T00:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.335882 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.335925 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.335937 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.335957 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.335969 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:05Z","lastTransitionTime":"2026-01-30T00:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.408058 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" event={"ID":"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5","Type":"ContainerStarted","Data":"86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e"}
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.431119 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.439290 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.439330 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.439341 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.439361 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.439375 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:05Z","lastTransitionTime":"2026-01-30T00:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.453091 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.471323 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.488590 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.507891 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z"
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.542602 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.542932 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.543043 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.543141 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.543220 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:05Z","lastTransitionTime":"2026-01-30T00:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.544520 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.564208 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.589991 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b238707103
5bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.611600 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.646814 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.646878 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.646894 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.646918 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.646937 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:05Z","lastTransitionTime":"2026-01-30T00:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.651862 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b
0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.668827 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\
\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.689239 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.697621 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.702926 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.723283 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath
\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.750759 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.750837 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.750850 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.750871 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.750886 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:05Z","lastTransitionTime":"2026-01-30T00:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.753588 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b
0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.769840 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\
\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.789508 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.808951 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.828612 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.855322 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.855382 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.855393 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.855417 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.855435 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:05Z","lastTransitionTime":"2026-01-30T00:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.857094 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:
09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.881596 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.904095 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.926659 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.951018 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.959009 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.959085 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.959109 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.959138 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.959159 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:05Z","lastTransitionTime":"2026-01-30T00:09:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:05 crc kubenswrapper[4885]: I0130 00:09:05.975375 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.026052 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:05Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.045534 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.062439 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.062489 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.062508 4885 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.062534 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.062551 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:06Z","lastTransitionTime":"2026-01-30T00:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.078989 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\
\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b2387071035bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\
\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.090863 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 04:30:43.525249985 +0000 UTC Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.099278 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.141727 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.141952 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.142967 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:06 crc kubenswrapper[4885]: E0130 00:09:06.143364 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:09:06 crc kubenswrapper[4885]: E0130 00:09:06.144419 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:09:06 crc kubenswrapper[4885]: E0130 00:09:06.144533 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.166516 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.166614 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.166651 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.166691 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.166719 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:06Z","lastTransitionTime":"2026-01-30T00:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.271417 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.271479 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.271491 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.271508 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.271520 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:06Z","lastTransitionTime":"2026-01-30T00:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.374826 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.374904 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.374926 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.374957 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.374978 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:06Z","lastTransitionTime":"2026-01-30T00:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.415825 4885 generic.go:334] "Generic (PLEG): container finished" podID="b01f0fb2-4c71-437b-9ac2-5ca44830f3a5" containerID="86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e" exitCode=0 Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.415933 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" event={"ID":"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5","Type":"ContainerDied","Data":"86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e"} Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.428618 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerStarted","Data":"0c84331482b43ab4bc489e158c2efe4776e55525d0f82ce12d39127a01f3be03"} Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.430071 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.430188 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.438912 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.467851 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.468979 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.479229 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b238707103
5bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.481935 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.482000 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.482022 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.482056 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.482080 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:06Z","lastTransitionTime":"2026-01-30T00:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.498094 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.529360 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.548670 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\
\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.570535 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.585913 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.585968 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.585977 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.585995 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.586005 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:06Z","lastTransitionTime":"2026-01-30T00:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.586504 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.604202 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.620705 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/
host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.638740 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-ap
iserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.660307 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.679757 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.690372 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.690469 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.690490 4885 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.690517 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.690540 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:06Z","lastTransitionTime":"2026-01-30T00:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.711113 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.728268 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.744990 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.764998 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.784238 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.794155 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.794217 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.794229 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.794255 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.794271 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:06Z","lastTransitionTime":"2026-01-30T00:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.807204 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.842293 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.854752 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.883728 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\
\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b2387071035bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a
32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.897159 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.897242 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.897266 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.897297 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.897318 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:06Z","lastTransitionTime":"2026-01-30T00:09:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.910370 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.940581 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c84331482b43ab4bc489e158c2efe4776e55525d0f82ce12d39127a01f3be03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\
\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is 
after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.956983 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.973319 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:06 crc kubenswrapper[4885]: I0130 00:09:06.986541 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.000183 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:06Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.000412 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.000456 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.000469 4885 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.000488 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.000503 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:07Z","lastTransitionTime":"2026-01-30T00:09:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.022812 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:07Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.040431 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:07Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.057830 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:07Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.092057 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 00:59:42.310638744 +0000 UTC Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.103337 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.103403 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.103421 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.103437 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.103449 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:07Z","lastTransitionTime":"2026-01-30T00:09:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.205992 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.206078 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.206103 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.206132 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.206154 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:07Z","lastTransitionTime":"2026-01-30T00:09:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.308349 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.308390 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.308398 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.308411 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.308425 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:07Z","lastTransitionTime":"2026-01-30T00:09:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.412188 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.412267 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.412287 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.412321 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.412346 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:07Z","lastTransitionTime":"2026-01-30T00:09:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.438726 4885 generic.go:334] "Generic (PLEG): container finished" podID="b01f0fb2-4c71-437b-9ac2-5ca44830f3a5" containerID="1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6" exitCode=0 Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.438839 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" event={"ID":"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5","Type":"ContainerDied","Data":"1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6"} Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.439023 4885 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.466445 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705
f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:07Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.490610 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:07Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.513474 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:07Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.515426 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.515702 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.515957 4885 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.516233 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.516409 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:07Z","lastTransitionTime":"2026-01-30T00:09:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.538746 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:07Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.555274 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:07Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.582065 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:07Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.600701 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:07Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.619805 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.619867 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.619888 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.619915 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.619935 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:07Z","lastTransitionTime":"2026-01-30T00:09:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.630404 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b2387071035bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\
"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:07Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.647398 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:07Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.662266 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:07Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.686143 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:07Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.703905 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:07Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.724129 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:07Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.724759 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.724819 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.724834 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.724885 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.724906 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:07Z","lastTransitionTime":"2026-01-30T00:09:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.751325 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c84331482b43ab4bc489e158c2efe4776e55525
d0f82ce12d39127a01f3be03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:07Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.770638 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:07Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.828971 4885 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.829025 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.829039 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.829061 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.829074 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:07Z","lastTransitionTime":"2026-01-30T00:09:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.932073 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.932112 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.932121 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.932133 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:07 crc kubenswrapper[4885]: I0130 00:09:07.932144 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:07Z","lastTransitionTime":"2026-01-30T00:09:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.035490 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.035979 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.036005 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.036028 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.036046 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:08Z","lastTransitionTime":"2026-01-30T00:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.092797 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 20:52:27.127490112 +0000 UTC
Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.139102 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.139147 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.139158 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.139176 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.139193 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:08Z","lastTransitionTime":"2026-01-30T00:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.141964 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.141978 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 00:09:08 crc kubenswrapper[4885]: E0130 00:09:08.142231 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.142003 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 00:09:08 crc kubenswrapper[4885]: E0130 00:09:08.142394 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 00:09:08 crc kubenswrapper[4885]: E0130 00:09:08.142459 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.242520 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.242556 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.242568 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.242585 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.242596 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:08Z","lastTransitionTime":"2026-01-30T00:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.351004 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.351086 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.351112 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.351144 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.351170 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:08Z","lastTransitionTime":"2026-01-30T00:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.447654 4885 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.448670 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" event={"ID":"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5","Type":"ContainerStarted","Data":"04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d"}
Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.454648 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.454725 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.454754 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.454819 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.454838 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:08Z","lastTransitionTime":"2026-01-30T00:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.472233 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:08Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.493612 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:08Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.517930 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:08Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.554608 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b2387071035bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:08Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.557301 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.557349 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.557369 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.557393 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.557412 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:08Z","lastTransitionTime":"2026-01-30T00:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.575735 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:08Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.594511 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:08Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.613938 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:08Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.631408 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:08Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.660616 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.660692 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.660717 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.660751 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.660807 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:08Z","lastTransitionTime":"2026-01-30T00:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.663652 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c84331482b43ab4bc489e158c2efe4776e55525
d0f82ce12d39127a01f3be03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:08Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.683842 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:08Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.706625 4885 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:08Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.730092 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:08Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.751976 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:08Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.764214 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.764264 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.764284 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.764312 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.764332 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:08Z","lastTransitionTime":"2026-01-30T00:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.777361 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:08Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.807673 4885 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\
\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:08Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.867340 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.867425 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.867445 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.867473 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.867494 4885 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:08Z","lastTransitionTime":"2026-01-30T00:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.969628 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.969694 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.969713 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.969738 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:08 crc kubenswrapper[4885]: I0130 00:09:08.969762 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:08Z","lastTransitionTime":"2026-01-30T00:09:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.072629 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.072681 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.072692 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.072713 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.072726 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:09Z","lastTransitionTime":"2026-01-30T00:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.093084 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 05:22:27.169292386 +0000 UTC Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.175532 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.175596 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.175608 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.175628 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.175639 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:09Z","lastTransitionTime":"2026-01-30T00:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.278565 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.278637 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.278646 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.278668 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.278687 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:09Z","lastTransitionTime":"2026-01-30T00:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.381397 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.381455 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.381465 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.381486 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.381498 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:09Z","lastTransitionTime":"2026-01-30T00:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.485112 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.485164 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.485182 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.485207 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.485227 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:09Z","lastTransitionTime":"2026-01-30T00:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.588351 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.588480 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.588495 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.588512 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.588524 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:09Z","lastTransitionTime":"2026-01-30T00:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.691652 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.691725 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.691745 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.691809 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.691830 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:09Z","lastTransitionTime":"2026-01-30T00:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.775579 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.775646 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:09:09 crc kubenswrapper[4885]: E0130 00:09:09.775874 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 00:09:09 crc kubenswrapper[4885]: E0130 00:09:09.775893 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 00:09:09 crc kubenswrapper[4885]: E0130 00:09:09.775928 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 00:09:09 crc kubenswrapper[4885]: E0130 00:09:09.775947 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 00:09:09 crc kubenswrapper[4885]: E0130 00:09:09.775958 4885 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 00:09:09 crc kubenswrapper[4885]: E0130 00:09:09.775968 4885 projected.go:194] Error preparing data for projected volume 
kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 00:09:09 crc kubenswrapper[4885]: E0130 00:09:09.776058 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-30 00:09:25.776030601 +0000 UTC m=+52.367502379 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 00:09:09 crc kubenswrapper[4885]: E0130 00:09:09.776093 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 00:09:25.776080172 +0000 UTC m=+52.367551950 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.796532 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.796588 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.796609 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.796635 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.796652 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:09Z","lastTransitionTime":"2026-01-30T00:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.876717 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:09:09 crc kubenswrapper[4885]: E0130 00:09:09.876959 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:09:25.876918521 +0000 UTC m=+52.468390309 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.900538 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.900637 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.900664 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.900690 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.900709 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:09Z","lastTransitionTime":"2026-01-30T00:09:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.978399 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:09 crc kubenswrapper[4885]: I0130 00:09:09.978467 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:09 crc kubenswrapper[4885]: E0130 00:09:09.978609 4885 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 00:09:09 crc kubenswrapper[4885]: E0130 00:09:09.978652 4885 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 00:09:09 crc kubenswrapper[4885]: E0130 00:09:09.978734 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 00:09:25.978695575 +0000 UTC m=+52.570167333 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 00:09:09 crc kubenswrapper[4885]: E0130 00:09:09.978799 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 00:09:25.978746066 +0000 UTC m=+52.570217854 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.004439 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.004505 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.004523 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.004552 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.004572 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:10Z","lastTransitionTime":"2026-01-30T00:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.093836 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 17:12:31.333674017 +0000 UTC Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.107483 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.107570 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.107597 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.107632 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.107661 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:10Z","lastTransitionTime":"2026-01-30T00:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.141940 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.141943 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.142102 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:09:10 crc kubenswrapper[4885]: E0130 00:09:10.142281 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:09:10 crc kubenswrapper[4885]: E0130 00:09:10.142424 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:09:10 crc kubenswrapper[4885]: E0130 00:09:10.142728 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.210374 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.210418 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.210427 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.210445 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.210456 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:10Z","lastTransitionTime":"2026-01-30T00:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.221949 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.222010 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.222021 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.222043 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.222056 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:10Z","lastTransitionTime":"2026-01-30T00:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:10 crc kubenswrapper[4885]: E0130 00:09:10.242752 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:10Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.247221 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.247292 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.247316 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.247339 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.247353 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:10Z","lastTransitionTime":"2026-01-30T00:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:10 crc kubenswrapper[4885]: E0130 00:09:10.263439 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:10Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.266899 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.266932 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.266947 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.266966 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.266984 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:10Z","lastTransitionTime":"2026-01-30T00:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:10 crc kubenswrapper[4885]: E0130 00:09:10.278404 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:10Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.281342 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.281376 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.281384 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.281397 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.281407 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:10Z","lastTransitionTime":"2026-01-30T00:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:10 crc kubenswrapper[4885]: E0130 00:09:10.292234 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:10Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.295400 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.295434 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.295445 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.295461 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.295477 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:10Z","lastTransitionTime":"2026-01-30T00:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:10 crc kubenswrapper[4885]: E0130 00:09:10.306452 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:10Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:10 crc kubenswrapper[4885]: E0130 00:09:10.306570 4885 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.313216 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.313249 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.313259 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.313274 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.313289 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:10Z","lastTransitionTime":"2026-01-30T00:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.416534 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.416571 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.416580 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.416595 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.416608 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:10Z","lastTransitionTime":"2026-01-30T00:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.457176 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hwpvs_147e5e96-db98-498f-b4a4-927d73cb5db5/ovnkube-controller/0.log" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.460641 4885 generic.go:334] "Generic (PLEG): container finished" podID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerID="0c84331482b43ab4bc489e158c2efe4776e55525d0f82ce12d39127a01f3be03" exitCode=1 Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.460716 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerDied","Data":"0c84331482b43ab4bc489e158c2efe4776e55525d0f82ce12d39127a01f3be03"} Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.462153 4885 scope.go:117] "RemoveContainer" containerID="0c84331482b43ab4bc489e158c2efe4776e55525d0f82ce12d39127a01f3be03" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.485451 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:10Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.503412 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:10Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.522110 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.522166 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.522179 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.522198 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.522211 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:10Z","lastTransitionTime":"2026-01-30T00:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.531707 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"moun
tPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:10Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.560711 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c84331482b43ab4bc489e158c2efe4776e55525
d0f82ce12d39127a01f3be03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c84331482b43ab4bc489e158c2efe4776e55525d0f82ce12d39127a01f3be03\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:09Z\\\",\\\"message\\\":\\\" for removal\\\\nI0130 00:09:09.622273 6153 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0130 00:09:09.622283 6153 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0130 00:09:09.622319 6153 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0130 00:09:09.622329 6153 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0130 00:09:09.622337 6153 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0130 00:09:09.622365 6153 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 00:09:09.622818 6153 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 00:09:09.623413 6153 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0130 00:09:09.623445 6153 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 00:09:09.623470 6153 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0130 00:09:09.623498 6153 factory.go:656] Stopping watch factory\\\\nI0130 00:09:09.623519 6153 ovnkube.go:599] Stopped ovnkube\\\\nI0130 00:09:09.623552 6153 handler.go:208] Removed *v1.Node event handler 7\\\\nI0130 00:09:09.623569 6153 handler.go:208] Removed *v1.Node event handler 2\\\\nI0130 
0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:10Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.576248 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:10Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.593190 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f4
84d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 
secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:10Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.610011 4885 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:10Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.625136 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.625206 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.625229 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.625258 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.625277 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:10Z","lastTransitionTime":"2026-01-30T00:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.632495 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:10Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.653369 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:10Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.667741 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:10Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.685594 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:10Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.699077 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:10Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.723417 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b238707103
5bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:10Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.733337 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.733370 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.733381 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.733394 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.733407 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:10Z","lastTransitionTime":"2026-01-30T00:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.735934 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:10Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.752055 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:10Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.835564 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.835608 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.835616 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.835631 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.835641 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:10Z","lastTransitionTime":"2026-01-30T00:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.939170 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.939263 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.939290 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.939330 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:10 crc kubenswrapper[4885]: I0130 00:09:10.939358 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:10Z","lastTransitionTime":"2026-01-30T00:09:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.042024 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.042077 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.042092 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.042115 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.042132 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:11Z","lastTransitionTime":"2026-01-30T00:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.094859 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 16:18:37.822515001 +0000 UTC Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.144753 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.144841 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.144861 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.144887 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.144905 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:11Z","lastTransitionTime":"2026-01-30T00:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.247848 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.247904 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.247917 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.247936 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.247951 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:11Z","lastTransitionTime":"2026-01-30T00:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.350627 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.350695 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.350713 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.350738 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.350756 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:11Z","lastTransitionTime":"2026-01-30T00:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.453504 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.453547 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.453561 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.453578 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.453592 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:11Z","lastTransitionTime":"2026-01-30T00:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.465990 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hwpvs_147e5e96-db98-498f-b4a4-927d73cb5db5/ovnkube-controller/0.log" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.470347 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerStarted","Data":"ffbfece66cc67b901f862713adbd65fca3c46774dc4e994a99dcce814759c59a"} Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.470515 4885 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.492796 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\
\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b2387071035bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\
"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:11Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.505964 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:11Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.515254 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:11Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.532505 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:11Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.544680 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:11Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.556001 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.556049 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.556067 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.556087 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.556101 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:11Z","lastTransitionTime":"2026-01-30T00:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.565495 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"moun
tPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:11Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.596866 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbfece66cc67b901f862713adbd65fca3c46774
dc4e994a99dcce814759c59a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c84331482b43ab4bc489e158c2efe4776e55525d0f82ce12d39127a01f3be03\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:09Z\\\",\\\"message\\\":\\\" for removal\\\\nI0130 00:09:09.622273 6153 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0130 00:09:09.622283 6153 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0130 00:09:09.622319 6153 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0130 00:09:09.622329 6153 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0130 00:09:09.622337 6153 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0130 00:09:09.622365 6153 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 00:09:09.622818 6153 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 00:09:09.623413 6153 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0130 00:09:09.623445 6153 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 00:09:09.623470 6153 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0130 00:09:09.623498 6153 factory.go:656] Stopping watch factory\\\\nI0130 00:09:09.623519 6153 ovnkube.go:599] Stopped ovnkube\\\\nI0130 00:09:09.623552 6153 handler.go:208] Removed *v1.Node event handler 7\\\\nI0130 00:09:09.623569 6153 handler.go:208] Removed *v1.Node event handler 2\\\\nI0130 
0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:11Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.615520 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:11Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.632636 4885 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433
c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:11Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.652981 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:11Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.659733 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.659812 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.659831 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.659858 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.659876 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:11Z","lastTransitionTime":"2026-01-30T00:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.667582 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:11Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.686046 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:11Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.701132 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:11Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.716877 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:11Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.739123 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:11Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.762882 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.762983 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.763009 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.763045 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.763073 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:11Z","lastTransitionTime":"2026-01-30T00:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.866225 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.866272 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.866284 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.866301 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.866343 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:11Z","lastTransitionTime":"2026-01-30T00:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.969360 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.969443 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.969467 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.969498 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:11 crc kubenswrapper[4885]: I0130 00:09:11.969522 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:11Z","lastTransitionTime":"2026-01-30T00:09:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.072070 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.072191 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.072209 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.072234 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.072254 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:12Z","lastTransitionTime":"2026-01-30T00:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.095730 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 11:39:29.79784051 +0000 UTC Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.141267 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.141355 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.141295 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:12 crc kubenswrapper[4885]: E0130 00:09:12.141503 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:09:12 crc kubenswrapper[4885]: E0130 00:09:12.141622 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:09:12 crc kubenswrapper[4885]: E0130 00:09:12.141763 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.175276 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.175338 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.175356 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.175379 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.175396 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:12Z","lastTransitionTime":"2026-01-30T00:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.278451 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.278507 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.278524 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.278547 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.278567 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:12Z","lastTransitionTime":"2026-01-30T00:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.381795 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.381859 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.381876 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.381901 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.381919 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:12Z","lastTransitionTime":"2026-01-30T00:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.476460 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hwpvs_147e5e96-db98-498f-b4a4-927d73cb5db5/ovnkube-controller/1.log" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.477421 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hwpvs_147e5e96-db98-498f-b4a4-927d73cb5db5/ovnkube-controller/0.log" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.482499 4885 generic.go:334] "Generic (PLEG): container finished" podID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerID="ffbfece66cc67b901f862713adbd65fca3c46774dc4e994a99dcce814759c59a" exitCode=1 Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.482573 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerDied","Data":"ffbfece66cc67b901f862713adbd65fca3c46774dc4e994a99dcce814759c59a"} Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.482656 4885 scope.go:117] "RemoveContainer" containerID="0c84331482b43ab4bc489e158c2efe4776e55525d0f82ce12d39127a01f3be03" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.483857 4885 scope.go:117] "RemoveContainer" containerID="ffbfece66cc67b901f862713adbd65fca3c46774dc4e994a99dcce814759c59a" Jan 30 00:09:12 crc kubenswrapper[4885]: E0130 00:09:12.484170 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.488423 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.488491 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.488505 4885 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.488521 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.488531 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:12Z","lastTransitionTime":"2026-01-30T00:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.507599 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\
"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:12Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.527955 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:12Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.548742 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:12Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.569612 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b238707103
5bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:12Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.583292 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:12Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.591433 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.591459 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.591469 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.591484 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.591494 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:12Z","lastTransitionTime":"2026-01-30T00:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.598060 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:12Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.609134 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:12Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.625752 4885 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:12Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.636337 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:12Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.655437 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:12Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.681370 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f8
2d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbfece66cc67b901f862713adbd65fca3c46774dc4e994a99dcce814759c59a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c84331482b43ab4bc489e158c2efe4776e55525d0f82ce12d39127a01f3be03\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:09Z\\\",\\\"message\\\":\\\" for removal\\\\nI0130 00:09:09.622273 6153 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0130 00:09:09.622283 6153 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0130 00:09:09.622319 6153 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0130 00:09:09.622329 6153 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0130 00:09:09.622337 6153 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0130 00:09:09.622365 6153 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 00:09:09.622818 6153 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 00:09:09.623413 6153 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0130 00:09:09.623445 6153 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 00:09:09.623470 6153 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0130 00:09:09.623498 6153 factory.go:656] Stopping watch factory\\\\nI0130 00:09:09.623519 6153 ovnkube.go:599] Stopped ovnkube\\\\nI0130 00:09:09.623552 6153 handler.go:208] Removed *v1.Node event handler 7\\\\nI0130 00:09:09.623569 6153 handler.go:208] Removed *v1.Node event handler 2\\\\nI0130 
0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbfece66cc67b901f862713adbd65fca3c46774dc4e994a99dcce814759c59a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:11Z\\\",\\\"message\\\":\\\" 2.209008ms\\\\nI0130 00:09:11.434246 6316 services_controller.go:356] Processing sync for service openshift-marketplace/community-operators for network=default\\\\nI0130 00:09:11.434238 6316 services_controller.go:443] Built service openshift-machine-config-operator/machine-config-daemon LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.43\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9001, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.43\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:8798, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nF0130 00:09:11.434197 6316 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error 
occur\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1
d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:12Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.695180 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.695218 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.695230 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.695246 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.695259 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:12Z","lastTransitionTime":"2026-01-30T00:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.704713 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:12Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.722408 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:12Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.742344 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:12Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.763368 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:12Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.798400 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.798465 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:12 crc 
kubenswrapper[4885]: I0130 00:09:12.798479 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.798500 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.798513 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:12Z","lastTransitionTime":"2026-01-30T00:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.901741 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.901877 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.901897 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.901929 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:12 crc kubenswrapper[4885]: I0130 00:09:12.901951 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:12Z","lastTransitionTime":"2026-01-30T00:09:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.004798 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.004849 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.004862 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.004883 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.004897 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:13Z","lastTransitionTime":"2026-01-30T00:09:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.096726 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 11:14:44.540834026 +0000 UTC Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.108022 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.108120 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.108140 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.108166 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.108185 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:13Z","lastTransitionTime":"2026-01-30T00:09:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.199290 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr"] Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.200209 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.205105 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.206317 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.211390 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.211450 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.211475 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.211505 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.211529 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:13Z","lastTransitionTime":"2026-01-30T00:09:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.238729 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbfece66cc67b901f862713adbd65fca3c46774dc4e994a99dcce814759c59a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c84331482b43ab4bc489e158c2efe4776e55525d0f82ce12d39127a01f3be03\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:09Z\\\",\\\"message\\\":\\\" for removal\\\\nI0130 00:09:09.622273 6153 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0130 00:09:09.622283 6153 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0130 00:09:09.622319 6153 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0130 00:09:09.622329 6153 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0130 00:09:09.622337 6153 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0130 00:09:09.622365 6153 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 00:09:09.622818 6153 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 00:09:09.623413 6153 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0130 00:09:09.623445 6153 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0130 00:09:09.623470 6153 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0130 00:09:09.623498 6153 factory.go:656] Stopping watch factory\\\\nI0130 00:09:09.623519 6153 ovnkube.go:599] Stopped ovnkube\\\\nI0130 00:09:09.623552 6153 handler.go:208] Removed *v1.Node event handler 7\\\\nI0130 00:09:09.623569 6153 handler.go:208] Removed *v1.Node event handler 2\\\\nI0130 
0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbfece66cc67b901f862713adbd65fca3c46774dc4e994a99dcce814759c59a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:11Z\\\",\\\"message\\\":\\\" 2.209008ms\\\\nI0130 00:09:11.434246 6316 services_controller.go:356] Processing sync for service openshift-marketplace/community-operators for network=default\\\\nI0130 00:09:11.434238 6316 services_controller.go:443] Built service openshift-machine-config-operator/machine-config-daemon LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.43\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9001, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.43\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:8798, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nF0130 00:09:11.434197 6316 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error 
occur\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1
d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.258459 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.279964 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.296908 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.310865 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6b25dd46-9353-45e2-86c3-ba3cdb6592e6-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-pdvgr\" (UID: \"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.311027 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l68rg\" (UniqueName: \"kubernetes.io/projected/6b25dd46-9353-45e2-86c3-ba3cdb6592e6-kube-api-access-l68rg\") pod \"ovnkube-control-plane-749d76644c-pdvgr\" (UID: \"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.311063 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6b25dd46-9353-45e2-86c3-ba3cdb6592e6-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-pdvgr\" (UID: \"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.311088 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6b25dd46-9353-45e2-86c3-ba3cdb6592e6-env-overrides\") pod \"ovnkube-control-plane-749d76644c-pdvgr\" (UID: \"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.314529 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.314593 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.314614 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.314640 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.314657 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:13Z","lastTransitionTime":"2026-01-30T00:09:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.317906 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.345216 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.364790 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.370276 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.378920 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.390882 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.406661 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.411638 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l68rg\" (UniqueName: \"kubernetes.io/projected/6b25dd46-9353-45e2-86c3-ba3cdb6592e6-kube-api-access-l68rg\") pod \"ovnkube-control-plane-749d76644c-pdvgr\" (UID: \"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.411687 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6b25dd46-9353-45e2-86c3-ba3cdb6592e6-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-pdvgr\" (UID: \"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.411724 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6b25dd46-9353-45e2-86c3-ba3cdb6592e6-env-overrides\") pod \"ovnkube-control-plane-749d76644c-pdvgr\" (UID: \"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.411757 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" 
(UniqueName: \"kubernetes.io/secret/6b25dd46-9353-45e2-86c3-ba3cdb6592e6-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-pdvgr\" (UID: \"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.412492 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6b25dd46-9353-45e2-86c3-ba3cdb6592e6-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-pdvgr\" (UID: \"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.412844 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6b25dd46-9353-45e2-86c3-ba3cdb6592e6-env-overrides\") pod \"ovnkube-control-plane-749d76644c-pdvgr\" (UID: \"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.417540 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.417873 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.417891 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.417914 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.417928 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:13Z","lastTransitionTime":"2026-01-30T00:09:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.421097 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6b25dd46-9353-45e2-86c3-ba3cdb6592e6-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-pdvgr\" (UID: \"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.426748 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manag
er-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.428498 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l68rg\" (UniqueName: \"kubernetes.io/projected/6b25dd46-9353-45e2-86c3-ba3cdb6592e6-kube-api-access-l68rg\") pod \"ovnkube-control-plane-749d76644c-pdvgr\" (UID: \"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.442734 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.455730 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.467334 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pdvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.488467 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hwpvs_147e5e96-db98-498f-b4a4-927d73cb5db5/ovnkube-controller/1.log" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.492740 4885 scope.go:117] "RemoveContainer" containerID="ffbfece66cc67b901f862713adbd65fca3c46774dc4e994a99dcce814759c59a" Jan 30 00:09:13 crc kubenswrapper[4885]: E0130 00:09:13.492924 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.493182 4885 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"cont
ainerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b2387071035bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6e
f5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.505842 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.520967 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.521000 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.521008 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.521021 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.521032 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:13Z","lastTransitionTime":"2026-01-30T00:09:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.521285 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pdvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.524223 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" Jan 30 00:09:13 crc kubenswrapper[4885]: W0130 00:09:13.546311 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6b25dd46_9353_45e2_86c3_ba3cdb6592e6.slice/crio-49256150d021d4f9a03866862a2df2eeeafeeb2f99adf376026a72bccc4863c4 WatchSource:0}: Error finding container 49256150d021d4f9a03866862a2df2eeeafeeb2f99adf376026a72bccc4863c4: Status 404 returned error can't find the container with id 49256150d021d4f9a03866862a2df2eeeafeeb2f99adf376026a72bccc4863c4 Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.546521 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPat
h\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b2387071035bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}}
,\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.562819 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.574587 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.586563 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.600119 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.614081 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.634207 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.634286 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.634306 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.634332 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.634350 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:13Z","lastTransitionTime":"2026-01-30T00:09:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.634517 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"moun
tPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.656845 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbfece66cc67b901f862713adbd65fca3c46774
dc4e994a99dcce814759c59a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbfece66cc67b901f862713adbd65fca3c46774dc4e994a99dcce814759c59a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:11Z\\\",\\\"message\\\":\\\" 2.209008ms\\\\nI0130 00:09:11.434246 6316 services_controller.go:356] Processing sync for service openshift-marketplace/community-operators for network=default\\\\nI0130 00:09:11.434238 6316 services_controller.go:443] Built service openshift-machine-config-operator/machine-config-daemon LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.43\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9001, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.43\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:8798, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nF0130 00:09:11.434197 6316 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occur\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.677192 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.692015 4885 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.707449 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.724925 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.737267 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.737329 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:13 crc 
kubenswrapper[4885]: I0130 00:09:13.737344 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.737361 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.737373 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:13Z","lastTransitionTime":"2026-01-30T00:09:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.740702 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"con
tainerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.758119 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.769596 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.839863 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.839926 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.839938 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.839955 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.839964 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:13Z","lastTransitionTime":"2026-01-30T00:09:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.942840 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.942890 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.942899 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.942914 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.942926 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:13Z","lastTransitionTime":"2026-01-30T00:09:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.948573 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-hg2nk"] Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.949152 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:13 crc kubenswrapper[4885]: E0130 00:09:13.949235 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.967231 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.980364 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:13 crc kubenswrapper[4885]: I0130 00:09:13.998313 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:13Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.018656 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dfbg\" (UniqueName: \"kubernetes.io/projected/313f7566-bae9-4b9c-8c30-9e3c7aef8364-kube-api-access-8dfbg\") pod \"network-metrics-daemon-hg2nk\" (UID: \"313f7566-bae9-4b9c-8c30-9e3c7aef8364\") " pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.018731 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs\") pod \"network-metrics-daemon-hg2nk\" (UID: \"313f7566-bae9-4b9c-8c30-9e3c7aef8364\") " pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.028064 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbfece66cc67b901f862713adbd65fca3c46774
dc4e994a99dcce814759c59a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbfece66cc67b901f862713adbd65fca3c46774dc4e994a99dcce814759c59a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:11Z\\\",\\\"message\\\":\\\" 2.209008ms\\\\nI0130 00:09:11.434246 6316 services_controller.go:356] Processing sync for service openshift-marketplace/community-operators for network=default\\\\nI0130 00:09:11.434238 6316 services_controller.go:443] Built service openshift-machine-config-operator/machine-config-daemon LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.43\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9001, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.43\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:8798, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nF0130 00:09:11.434197 6316 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occur\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.045572 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.045625 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.045639 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.045657 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.045670 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:14Z","lastTransitionTime":"2026-01-30T00:09:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.065142 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.084044 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.097691 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 12:20:11.016948645 +0000 UTC Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.107871 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.120470 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs\") pod \"network-metrics-daemon-hg2nk\" (UID: \"313f7566-bae9-4b9c-8c30-9e3c7aef8364\") " pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.120569 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dfbg\" (UniqueName: \"kubernetes.io/projected/313f7566-bae9-4b9c-8c30-9e3c7aef8364-kube-api-access-8dfbg\") pod \"network-metrics-daemon-hg2nk\" (UID: \"313f7566-bae9-4b9c-8c30-9e3c7aef8364\") " pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:14 crc kubenswrapper[4885]: E0130 00:09:14.120898 4885 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 00:09:14 crc kubenswrapper[4885]: E0130 00:09:14.120939 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs podName:313f7566-bae9-4b9c-8c30-9e3c7aef8364 nodeName:}" failed. No retries permitted until 2026-01-30 00:09:14.620923844 +0000 UTC m=+41.212395592 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs") pod "network-metrics-daemon-hg2nk" (UID: "313f7566-bae9-4b9c-8c30-9e3c7aef8364") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.125865 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.141008 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.141134 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.141189 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:09:14 crc kubenswrapper[4885]: E0130 00:09:14.141157 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:09:14 crc kubenswrapper[4885]: E0130 00:09:14.141359 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:09:14 crc kubenswrapper[4885]: E0130 00:09:14.141439 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.141497 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dfbg\" (UniqueName: \"kubernetes.io/projected/313f7566-bae9-4b9c-8c30-9e3c7aef8364-kube-api-access-8dfbg\") pod \"network-metrics-daemon-hg2nk\" (UID: \"313f7566-bae9-4b9c-8c30-9e3c7aef8364\") " pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.144617 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\
":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.147450 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.147490 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.147499 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.147514 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.147527 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:14Z","lastTransitionTime":"2026-01-30T00:09:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.159561 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.172036 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.185186 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.193745 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hg2nk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313f7566-bae9-4b9c-8c30-9e3c7aef8364\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hg2nk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.210928 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b238707103
5bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.230144 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.249049 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.249095 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.249066 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\
\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.249125 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.249212 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.249224 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:14Z","lastTransitionTime":"2026-01-30T00:09:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.261523 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pdvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.273909 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.287313 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.299498 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.
d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.317025 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbfece66cc67b901f862713adbd65fca3c46774
dc4e994a99dcce814759c59a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbfece66cc67b901f862713adbd65fca3c46774dc4e994a99dcce814759c59a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:11Z\\\",\\\"message\\\":\\\" 2.209008ms\\\\nI0130 00:09:11.434246 6316 services_controller.go:356] Processing sync for service openshift-marketplace/community-operators for network=default\\\\nI0130 00:09:11.434238 6316 services_controller.go:443] Built service openshift-machine-config-operator/machine-config-daemon LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.43\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9001, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.43\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:8798, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nF0130 00:09:11.434197 6316 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occur\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.327755 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.344134 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.354470 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.354538 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.354555 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.354578 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.354596 4885 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:14Z","lastTransitionTime":"2026-01-30T00:09:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.359322 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.373410 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.390934 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.1
26.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.412047 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kub
e-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.435721 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.448668 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.456619 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.456714 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.456728 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.456744 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.456755 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:14Z","lastTransitionTime":"2026-01-30T00:09:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.469244 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b2387071035bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.485543 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.496480 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" event={"ID":"6b25dd46-9353-45e2-86c3-ba3cdb6592e6","Type":"ContainerStarted","Data":"e676f6c9a7795d8f3a7c8ec94db7201104c58cb83d213dc3f5bdcd47ca803b4b"} Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.496536 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" event={"ID":"6b25dd46-9353-45e2-86c3-ba3cdb6592e6","Type":"ContainerStarted","Data":"f20bcaeae22fbe2c1ed3aae71f5a6ef7ed50b90d1485e6a793bafd4e72ab0453"} Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.496555 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" event={"ID":"6b25dd46-9353-45e2-86c3-ba3cdb6592e6","Type":"ContainerStarted","Data":"49256150d021d4f9a03866862a2df2eeeafeeb2f99adf376026a72bccc4863c4"} Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.499655 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.516016 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pdvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.528946 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hg2nk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313f7566-bae9-4b9c-8c30-9e3c7aef8364\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hg2nk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.546371 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.559316 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.559347 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.559360 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.559375 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.559386 4885 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:14Z","lastTransitionTime":"2026-01-30T00:09:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.564279 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.579009 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.594736 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.1
26.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.609704 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kub
e-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.624493 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.626899 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs\") pod \"network-metrics-daemon-hg2nk\" (UID: \"313f7566-bae9-4b9c-8c30-9e3c7aef8364\") " pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:14 crc kubenswrapper[4885]: E0130 00:09:14.627160 4885 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 00:09:14 crc kubenswrapper[4885]: E0130 00:09:14.627228 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs podName:313f7566-bae9-4b9c-8c30-9e3c7aef8364 nodeName:}" failed. No retries permitted until 2026-01-30 00:09:15.627211745 +0000 UTC m=+42.218683503 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs") pod "network-metrics-daemon-hg2nk" (UID: "313f7566-bae9-4b9c-8c30-9e3c7aef8364") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.636852 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.656068 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b238707103
5bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.662079 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.662110 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.662119 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.662133 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.662141 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:14Z","lastTransitionTime":"2026-01-30T00:09:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.670472 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.679819 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.696785 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20bcaeae22fbe2c1ed3aae71f5a6ef7ed50b90d1485e6a793bafd4e72ab0453\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e676f6c9a7795d8f3a7c8ec94db7201104c58cb83d213dc3f5bdcd47ca803b4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pdvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 
00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.708569 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hg2nk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313f7566-bae9-4b9c-8c30-9e3c7aef8364\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hg2nk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.719699 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.733434 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.750819 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.765253 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.765445 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.765552 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.765639 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.765724 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:14Z","lastTransitionTime":"2026-01-30T00:09:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.769266 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbfece66cc67b901f862713adbd65fca3c46774
dc4e994a99dcce814759c59a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbfece66cc67b901f862713adbd65fca3c46774dc4e994a99dcce814759c59a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:11Z\\\",\\\"message\\\":\\\" 2.209008ms\\\\nI0130 00:09:11.434246 6316 services_controller.go:356] Processing sync for service openshift-marketplace/community-operators for network=default\\\\nI0130 00:09:11.434238 6316 services_controller.go:443] Built service openshift-machine-config-operator/machine-config-daemon LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.43\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9001, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.43\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:8798, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nF0130 00:09:11.434197 6316 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occur\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.780097 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.868608 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.868653 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.868674 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.868702 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.868725 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:14Z","lastTransitionTime":"2026-01-30T00:09:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.971922 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.971987 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.972011 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.972040 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:14 crc kubenswrapper[4885]: I0130 00:09:14.972066 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:14Z","lastTransitionTime":"2026-01-30T00:09:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.075237 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.075297 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.075314 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.075340 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.075358 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:15Z","lastTransitionTime":"2026-01-30T00:09:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.098832 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 22:34:03.204478935 +0000 UTC Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.178716 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.178852 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.178881 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.178912 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.178935 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:15Z","lastTransitionTime":"2026-01-30T00:09:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.282867 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.282908 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.282926 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.282950 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.282967 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:15Z","lastTransitionTime":"2026-01-30T00:09:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.386366 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.386435 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.386640 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.386674 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.386699 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:15Z","lastTransitionTime":"2026-01-30T00:09:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.489942 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.489996 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.490012 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.490035 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.490053 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:15Z","lastTransitionTime":"2026-01-30T00:09:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.593438 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.593526 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.593547 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.593581 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.593601 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:15Z","lastTransitionTime":"2026-01-30T00:09:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.637794 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs\") pod \"network-metrics-daemon-hg2nk\" (UID: \"313f7566-bae9-4b9c-8c30-9e3c7aef8364\") " pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:15 crc kubenswrapper[4885]: E0130 00:09:15.637936 4885 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 00:09:15 crc kubenswrapper[4885]: E0130 00:09:15.637997 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs podName:313f7566-bae9-4b9c-8c30-9e3c7aef8364 nodeName:}" failed. No retries permitted until 2026-01-30 00:09:17.637980417 +0000 UTC m=+44.229452175 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs") pod "network-metrics-daemon-hg2nk" (UID: "313f7566-bae9-4b9c-8c30-9e3c7aef8364") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.697390 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.697501 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.697530 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.697571 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.697597 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:15Z","lastTransitionTime":"2026-01-30T00:09:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.801739 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.801834 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.801852 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.801882 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.801902 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:15Z","lastTransitionTime":"2026-01-30T00:09:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.905239 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.905380 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.905415 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.905446 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:15 crc kubenswrapper[4885]: I0130 00:09:15.905481 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:15Z","lastTransitionTime":"2026-01-30T00:09:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.007946 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.008003 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.008023 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.008043 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.008057 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:16Z","lastTransitionTime":"2026-01-30T00:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.099903 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 07:01:30.456315593 +0000 UTC Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.111613 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.111691 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.111708 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.111736 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.111755 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:16Z","lastTransitionTime":"2026-01-30T00:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.140804 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:09:16 crc kubenswrapper[4885]: E0130 00:09:16.140973 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.141254 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.141325 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.141446 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:16 crc kubenswrapper[4885]: E0130 00:09:16.141654 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:09:16 crc kubenswrapper[4885]: E0130 00:09:16.141863 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:09:16 crc kubenswrapper[4885]: E0130 00:09:16.142002 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.215147 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.215220 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.215241 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.215268 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.215287 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:16Z","lastTransitionTime":"2026-01-30T00:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.318819 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.318896 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.318924 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.318955 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.318980 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:16Z","lastTransitionTime":"2026-01-30T00:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.422298 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.422364 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.422381 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.422404 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.422422 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:16Z","lastTransitionTime":"2026-01-30T00:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.526046 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.526222 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.526244 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.526268 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.526290 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:16Z","lastTransitionTime":"2026-01-30T00:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.630950 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.631009 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.631025 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.631048 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.631066 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:16Z","lastTransitionTime":"2026-01-30T00:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.734720 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.734810 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.734829 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.734854 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.734871 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:16Z","lastTransitionTime":"2026-01-30T00:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.838382 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.838448 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.838466 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.838491 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.838509 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:16Z","lastTransitionTime":"2026-01-30T00:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.941348 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.941414 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.941436 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.941466 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:16 crc kubenswrapper[4885]: I0130 00:09:16.941487 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:16Z","lastTransitionTime":"2026-01-30T00:09:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.044863 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.044940 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.044964 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.044993 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.045014 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:17Z","lastTransitionTime":"2026-01-30T00:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.101013 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 17:31:33.06117166 +0000 UTC Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.148658 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.148728 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.148747 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.148801 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.148820 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:17Z","lastTransitionTime":"2026-01-30T00:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.251665 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.251717 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.251733 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.251755 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.251807 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:17Z","lastTransitionTime":"2026-01-30T00:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.355532 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.355601 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.355628 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.355658 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.355680 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:17Z","lastTransitionTime":"2026-01-30T00:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.459445 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.459517 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.459538 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.459564 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.459584 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:17Z","lastTransitionTime":"2026-01-30T00:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.562262 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.562319 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.562336 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.562364 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.562382 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:17Z","lastTransitionTime":"2026-01-30T00:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.661496 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs\") pod \"network-metrics-daemon-hg2nk\" (UID: \"313f7566-bae9-4b9c-8c30-9e3c7aef8364\") " pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:17 crc kubenswrapper[4885]: E0130 00:09:17.661851 4885 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 00:09:17 crc kubenswrapper[4885]: E0130 00:09:17.661980 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs podName:313f7566-bae9-4b9c-8c30-9e3c7aef8364 nodeName:}" failed. No retries permitted until 2026-01-30 00:09:21.661944758 +0000 UTC m=+48.253416676 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs") pod "network-metrics-daemon-hg2nk" (UID: "313f7566-bae9-4b9c-8c30-9e3c7aef8364") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.666213 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.666364 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.666390 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.666419 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.666443 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:17Z","lastTransitionTime":"2026-01-30T00:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.769749 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.770218 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.770379 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.770526 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.770677 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:17Z","lastTransitionTime":"2026-01-30T00:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.873515 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.873580 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.873602 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.873631 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.873655 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:17Z","lastTransitionTime":"2026-01-30T00:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.977068 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.977120 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.977135 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.977157 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:17 crc kubenswrapper[4885]: I0130 00:09:17.977175 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:17Z","lastTransitionTime":"2026-01-30T00:09:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.081532 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.081614 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.081632 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.081657 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.081673 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:18Z","lastTransitionTime":"2026-01-30T00:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.102162 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 21:11:55.859226098 +0000 UTC Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.141999 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.142132 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.142206 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:09:18 crc kubenswrapper[4885]: E0130 00:09:18.142204 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.141999 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:18 crc kubenswrapper[4885]: E0130 00:09:18.142369 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:09:18 crc kubenswrapper[4885]: E0130 00:09:18.142499 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:09:18 crc kubenswrapper[4885]: E0130 00:09:18.142606 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.185228 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.185822 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.186002 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.186205 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.186451 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:18Z","lastTransitionTime":"2026-01-30T00:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.289482 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.289538 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.289551 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.289570 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.289582 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:18Z","lastTransitionTime":"2026-01-30T00:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.392726 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.392804 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.392822 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.392845 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.392864 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:18Z","lastTransitionTime":"2026-01-30T00:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.495717 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.495763 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.495827 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.495850 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.495869 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:18Z","lastTransitionTime":"2026-01-30T00:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.598293 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.598872 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.599134 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.599329 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.599496 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:18Z","lastTransitionTime":"2026-01-30T00:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.701992 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.702033 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.702045 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.702064 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.702075 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:18Z","lastTransitionTime":"2026-01-30T00:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.805394 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.805452 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.805470 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.805494 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.805512 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:18Z","lastTransitionTime":"2026-01-30T00:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.912345 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.912422 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.912446 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.912474 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:18 crc kubenswrapper[4885]: I0130 00:09:18.912496 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:18Z","lastTransitionTime":"2026-01-30T00:09:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.015700 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.015831 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.015867 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.015891 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.015908 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:19Z","lastTransitionTime":"2026-01-30T00:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.102868 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 23:16:30.416932191 +0000 UTC
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.119235 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.119285 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.119303 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.119331 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.119352 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:19Z","lastTransitionTime":"2026-01-30T00:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.222283 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.222342 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.222359 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.222381 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.222398 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:19Z","lastTransitionTime":"2026-01-30T00:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.325281 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.325400 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.325422 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.325445 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.325463 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:19Z","lastTransitionTime":"2026-01-30T00:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.429278 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.429878 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.430103 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.430321 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.430501 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:19Z","lastTransitionTime":"2026-01-30T00:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.534440 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.535144 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.535298 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.535412 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.535520 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:19Z","lastTransitionTime":"2026-01-30T00:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.642739 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.642885 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.642900 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.642923 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.642937 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:19Z","lastTransitionTime":"2026-01-30T00:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.745430 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.745507 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.745524 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.745555 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.745580 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:19Z","lastTransitionTime":"2026-01-30T00:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.848854 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.848953 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.848976 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.849004 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.849023 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:19Z","lastTransitionTime":"2026-01-30T00:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.953182 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.953245 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.953262 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.953286 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:19 crc kubenswrapper[4885]: I0130 00:09:19.953304 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:19Z","lastTransitionTime":"2026-01-30T00:09:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.056444 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.056493 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.056503 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.056521 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.056534 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:20Z","lastTransitionTime":"2026-01-30T00:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.103523 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 03:40:34.549394778 +0000 UTC
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.142119 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.142601 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk"
Jan 30 00:09:20 crc kubenswrapper[4885]: E0130 00:09:20.142895 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364"
Jan 30 00:09:20 crc kubenswrapper[4885]: E0130 00:09:20.143199 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.143486 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.143706 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 00:09:20 crc kubenswrapper[4885]: E0130 00:09:20.144149 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 00:09:20 crc kubenswrapper[4885]: E0130 00:09:20.144071 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.160799 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.160848 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.160860 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.160885 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.160898 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:20Z","lastTransitionTime":"2026-01-30T00:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.263593 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.263641 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.263658 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.263678 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.263692 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:20Z","lastTransitionTime":"2026-01-30T00:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.366660 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.366737 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.366761 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.366844 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.366867 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:20Z","lastTransitionTime":"2026-01-30T00:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.470180 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.470238 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.470255 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.470278 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.470296 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:20Z","lastTransitionTime":"2026-01-30T00:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.573849 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.573912 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.573929 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.573952 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.573968 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:20Z","lastTransitionTime":"2026-01-30T00:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.614246 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.614310 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.614334 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.614361 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.614384 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:20Z","lastTransitionTime":"2026-01-30T00:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:20 crc kubenswrapper[4885]: E0130 00:09:20.636589 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:20Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.642758 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.642876 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.642897 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.642918 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.642936 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:20Z","lastTransitionTime":"2026-01-30T00:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:20 crc kubenswrapper[4885]: E0130 00:09:20.661199 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:20Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.665807 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.665884 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.665896 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.665920 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.665934 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:20Z","lastTransitionTime":"2026-01-30T00:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:20 crc kubenswrapper[4885]: E0130 00:09:20.679613 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:20Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.684541 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.684595 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.684614 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.684638 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.684658 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:20Z","lastTransitionTime":"2026-01-30T00:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:20 crc kubenswrapper[4885]: E0130 00:09:20.703034 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:20Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.708354 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.708405 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.708423 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.708444 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.708462 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:20Z","lastTransitionTime":"2026-01-30T00:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:20 crc kubenswrapper[4885]: E0130 00:09:20.730439 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:20Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:20 crc kubenswrapper[4885]: E0130 00:09:20.730568 4885 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.732699 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.732734 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.732745 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.732790 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.732804 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:20Z","lastTransitionTime":"2026-01-30T00:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.835958 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.836039 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.836060 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.836088 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.836115 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:20Z","lastTransitionTime":"2026-01-30T00:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.940396 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.940457 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.940481 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.940511 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:20 crc kubenswrapper[4885]: I0130 00:09:20.940537 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:20Z","lastTransitionTime":"2026-01-30T00:09:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.044601 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.044659 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.044677 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.044702 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.044724 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:21Z","lastTransitionTime":"2026-01-30T00:09:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.104100 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 00:11:23.408512783 +0000 UTC Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.148402 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.148461 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.148481 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.148506 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.148526 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:21Z","lastTransitionTime":"2026-01-30T00:09:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.251945 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.251997 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.252016 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.252039 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.252057 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:21Z","lastTransitionTime":"2026-01-30T00:09:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.355252 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.355319 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.355336 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.355361 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.355378 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:21Z","lastTransitionTime":"2026-01-30T00:09:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.458988 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.459112 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.459141 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.459176 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.459202 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:21Z","lastTransitionTime":"2026-01-30T00:09:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.562333 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.562387 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.562400 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.562426 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.562445 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:21Z","lastTransitionTime":"2026-01-30T00:09:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.666230 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.666275 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.666290 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.666315 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.666334 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:21Z","lastTransitionTime":"2026-01-30T00:09:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.729830 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs\") pod \"network-metrics-daemon-hg2nk\" (UID: \"313f7566-bae9-4b9c-8c30-9e3c7aef8364\") " pod="openshift-multus/network-metrics-daemon-hg2nk"
Jan 30 00:09:21 crc kubenswrapper[4885]: E0130 00:09:21.730349 4885 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 30 00:09:21 crc kubenswrapper[4885]: E0130 00:09:21.730720 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs podName:313f7566-bae9-4b9c-8c30-9e3c7aef8364 nodeName:}" failed. No retries permitted until 2026-01-30 00:09:29.730641997 +0000 UTC m=+56.322113785 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs") pod "network-metrics-daemon-hg2nk" (UID: "313f7566-bae9-4b9c-8c30-9e3c7aef8364") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.769410 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.769492 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.769516 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.769547 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.769591 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:21Z","lastTransitionTime":"2026-01-30T00:09:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.872233 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.872298 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.872317 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.872342 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.872369 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:21Z","lastTransitionTime":"2026-01-30T00:09:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.975893 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.975976 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.976011 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.976052 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:21 crc kubenswrapper[4885]: I0130 00:09:21.976079 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:21Z","lastTransitionTime":"2026-01-30T00:09:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.079579 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.079652 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.079671 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.079703 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.079724 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:22Z","lastTransitionTime":"2026-01-30T00:09:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.104764 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 03:29:49.708372627 +0000 UTC
Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.141988 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.141995 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 00:09:22 crc kubenswrapper[4885]: E0130 00:09:22.142237 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.142020 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 00:09:22 crc kubenswrapper[4885]: E0130 00:09:22.142419 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.142415 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk"
Jan 30 00:09:22 crc kubenswrapper[4885]: E0130 00:09:22.142513 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 00:09:22 crc kubenswrapper[4885]: E0130 00:09:22.142966 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364"
Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.182900 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.182987 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.183007 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.183040 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.183064 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:22Z","lastTransitionTime":"2026-01-30T00:09:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.286438 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.286503 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.286522 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.286546 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.286567 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:22Z","lastTransitionTime":"2026-01-30T00:09:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.390667 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.390833 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.390862 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.390897 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.390922 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:22Z","lastTransitionTime":"2026-01-30T00:09:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.494400 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.494491 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.494516 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.494546 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.494570 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:22Z","lastTransitionTime":"2026-01-30T00:09:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.598513 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.598583 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.598603 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.598632 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.598651 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:22Z","lastTransitionTime":"2026-01-30T00:09:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.701715 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.701821 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.701882 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.701910 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.701933 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:22Z","lastTransitionTime":"2026-01-30T00:09:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.805500 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.805572 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.805590 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.805621 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.805643 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:22Z","lastTransitionTime":"2026-01-30T00:09:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.909284 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.909358 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.909385 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.909421 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:22 crc kubenswrapper[4885]: I0130 00:09:22.909443 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:22Z","lastTransitionTime":"2026-01-30T00:09:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.013338 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.013444 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.013464 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.013492 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.013512 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:23Z","lastTransitionTime":"2026-01-30T00:09:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.105805 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 09:30:19.177412511 +0000 UTC
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.116607 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.116671 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.116690 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.116718 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.116736 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:23Z","lastTransitionTime":"2026-01-30T00:09:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.220228 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.220283 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.220296 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.220318 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.220336 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:23Z","lastTransitionTime":"2026-01-30T00:09:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.324007 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.324089 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.324107 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.324132 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.324148 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:23Z","lastTransitionTime":"2026-01-30T00:09:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.428186 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.428270 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.428296 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.428330 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.428354 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:23Z","lastTransitionTime":"2026-01-30T00:09:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.533244 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.533355 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.533590 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.533673 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.533735 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:23Z","lastTransitionTime":"2026-01-30T00:09:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.637555 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.637631 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.637649 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.637676 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.637698 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:23Z","lastTransitionTime":"2026-01-30T00:09:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.741034 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.741087 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.741099 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.741118 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.741132 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:23Z","lastTransitionTime":"2026-01-30T00:09:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.849808 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.849934 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.849961 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.849991 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.850010 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:23Z","lastTransitionTime":"2026-01-30T00:09:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.954147 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.954218 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.954231 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.954254 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:23 crc kubenswrapper[4885]: I0130 00:09:23.954271 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:23Z","lastTransitionTime":"2026-01-30T00:09:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.058249 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.058322 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.058341 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.058368 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.058391 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:24Z","lastTransitionTime":"2026-01-30T00:09:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.061867 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.074415 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.086013 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.099526 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.105997 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 09:09:02.077715558 +0000 UTC Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.115338 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.137591 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.140941 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.141176 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 00:09:24 crc kubenswrapper[4885]: E0130 00:09:24.141488 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364"
Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.141858 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.142005 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 00:09:24 crc kubenswrapper[4885]: E0130 00:09:24.142202 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 00:09:24 crc kubenswrapper[4885]: E0130 00:09:24.142405 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 00:09:24 crc kubenswrapper[4885]: E0130 00:09:24.142524 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.154192 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.161369 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.161419 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.161433 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.161454 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.161473 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:24Z","lastTransitionTime":"2026-01-30T00:09:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.167899 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.183092 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.203221 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b238707103
5bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.215975 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.227395 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.243125 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20bcaeae22fbe2c1ed3aae71f5a6ef7ed50b90d1485e6a793bafd4e72ab0453\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e676f6c9a7795d8f3a7c8ec94db7201104c58cb83d213dc3f5bdcd47ca803b4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pdvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.256362 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hg2nk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313f7566-bae9-4b9c-8c30-9e3c7aef8364\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hg2nk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.265000 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.265050 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.265065 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.265091 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.265108 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:24Z","lastTransitionTime":"2026-01-30T00:09:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.274495 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.289856 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.303883 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.334316 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbfece66cc67b901f862713adbd65fca3c46774
dc4e994a99dcce814759c59a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbfece66cc67b901f862713adbd65fca3c46774dc4e994a99dcce814759c59a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:11Z\\\",\\\"message\\\":\\\" 2.209008ms\\\\nI0130 00:09:11.434246 6316 services_controller.go:356] Processing sync for service openshift-marketplace/community-operators for network=default\\\\nI0130 00:09:11.434238 6316 services_controller.go:443] Built service openshift-machine-config-operator/machine-config-daemon LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.43\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9001, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.43\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:8798, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nF0130 00:09:11.434197 6316 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occur\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.351982 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.367586 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.367673 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.367693 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.367746 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.367766 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:24Z","lastTransitionTime":"2026-01-30T00:09:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.374158 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.388045 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.399845 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.414520 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.433666 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.449091 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76b3c14d-318a-4269-a669-be91a3a30425\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75325616731f47d3b1ac4f319f5190066c26fd2b2a2b20360bf25bfba831ff21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0896487baf92f89c8128ccac2ecc8ababb15c58c7cfe31432de7fe9095236e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://87b921c6d7b3ea99ae7c08fdc3e0a2b5ae0ec300c5ca5f0be5fb8de83d620537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.470667 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.471241 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.471345 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.471382 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.471407 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.471471 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:24Z","lastTransitionTime":"2026-01-30T00:09:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.486736 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.524687 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b238707103
5bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.547664 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.566699 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.575065 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.575136 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.575165 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.575201 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.575227 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:24Z","lastTransitionTime":"2026-01-30T00:09:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.586464 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20bcaeae22fbe2c1ed3aae71f5a6ef7ed50b90d1485e6a793bafd4e72ab0453\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e676f6c9a7795d8f3a7c8ec94db7201104c58cb83d213dc3f5bdcd47
ca803b4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pdvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.604157 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hg2nk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313f7566-bae9-4b9c-8c30-9e3c7aef8364\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hg2nk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.625684 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.643750 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.665400 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.679620 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.679880 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.680015 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.680170 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.680292 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:24Z","lastTransitionTime":"2026-01-30T00:09:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.702207 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffbfece66cc67b901f862713adbd65fca3c46774dc4e994a99dcce814759c59a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbfece66cc67b901f862713adbd65fca3c46774dc4e994a99dcce814759c59a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:11Z\\\",\\\"message\\\":\\\" 2.209008ms\\\\nI0130 00:09:11.434246 6316 services_controller.go:356] Processing sync for service openshift-marketplace/community-operators for network=default\\\\nI0130 00:09:11.434238 6316 services_controller.go:443] Built service openshift-machine-config-operator/machine-config-daemon LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.43\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9001, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.43\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:8798, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nF0130 00:09:11.434197 6316 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occur\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.724558 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:24Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.783554 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.783605 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.783622 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.783640 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.783658 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:24Z","lastTransitionTime":"2026-01-30T00:09:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.886589 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.886662 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.886675 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.886703 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:24 crc kubenswrapper[4885]: I0130 00:09:24.886717 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:24Z","lastTransitionTime":"2026-01-30T00:09:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[... identical NodeHasSufficientMemory / NodeHasNoDiskPressure / NodeHasSufficientPID / NodeNotReady events and "Node became not ready" records repeat at ~100 ms intervals ...]
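The two "Failed to update status for pod" records above share a single root cause: the status patches are routed through the pod.network-node-identity.openshift.io admission webhook, whose serving certificate expired on 2025-08-24 while the node clock reads 2026-01-30, so TLS verification rejects the connection before any patch is attempted. A minimal sketch of the same validity-window check that produces the "certificate has expired or is not yet valid" wording, in Go; the certificate path below is a placeholder for illustration, not where the webhook actually keeps its certificate:

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Placeholder path; the webhook's real serving certificate lives
	// wherever network-node-identity mounts it.
	data, err := os.ReadFile("/tmp/webhook-serving.crt")
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		panic("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}
	// The same NotBefore/NotAfter window check that TLS verification applies.
	now := time.Now().UTC()
	switch {
	case now.Before(cert.NotBefore):
		fmt.Printf("certificate is not yet valid: current time %s is before %s\n",
			now.Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
	case now.After(cert.NotAfter):
		fmt.Printf("certificate has expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	default:
		fmt.Println("certificate is within its validity window")
	}
}

The kubelet keeps retrying on its status-sync interval, which is why the identical webhook error recurs for every pod on the node until the certificate is rotated (or the clock is corrected).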
Jan 30 00:09:25 crc kubenswrapper[4885]: I0130 00:09:25.107004 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 02:33:59.167956785 +0000 UTC
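The certificate_manager lines in this log all report the same kubelet-serving expiration (2026-02-24 05:53:03 UTC) but a different rotation deadline each time (2025-12-22 here, 2026-01-16 and 2025-12-30 further down). That is expected: the manager re-derives the deadline with random jitter on each evaluation so that a fleet's kubelets do not all rotate at once. A sketch of that idea, assuming a uniformly random point in the 70-90% span of the certificate lifetime, which approximates the upstream behavior; the issue time used below is invented for the example:

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a uniformly random instant between 70% and 90% of
// the certificate's lifetime. The 70-90% window approximates the jitter the
// kubelet applies; it is an assumption for this sketch.
func rotationDeadline(notBefore, notAfter time.Time, rng *rand.Rand) time.Time {
	lifetime := notAfter.Sub(notBefore)
	fraction := 0.7 + 0.2*rng.Float64()
	return notBefore.Add(time.Duration(float64(lifetime) * fraction))
}

func main() {
	// Expiration as logged; the issue time is invented for the example.
	notBefore := time.Date(2025, 11, 26, 5, 53, 3, 0, time.UTC)
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC)
	rng := rand.New(rand.NewSource(time.Now().UnixNano()))
	for i := 0; i < 3; i++ {
		// A different deadline on each evaluation, as in the log.
		fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter, rng))
	}
}

Note that all three logged deadlines are already in the past relative to the node clock, so rotation is overdue and the manager keeps recomputing on every pass.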
[... identical node status event records repeat ...]
Jan 30 00:09:25 crc kubenswrapper[4885]: I0130 00:09:25.783696 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 00:09:25 crc kubenswrapper[4885]: I0130 00:09:25.783749 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 00:09:25 crc kubenswrapper[4885]: E0130 00:09:25.783926 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 30 00:09:25 crc kubenswrapper[4885]: E0130 00:09:25.783950 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 30 00:09:25 crc kubenswrapper[4885]: E0130 00:09:25.783926 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 30 00:09:25 crc kubenswrapper[4885]: E0130 00:09:25.783980 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 30 00:09:25 crc kubenswrapper[4885]: E0130 00:09:25.783994 4885 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
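The kube-api-access-* volumes being mounted here are projected volumes: the service-account token bundled with the kube-root-ca.crt and openshift-service-ca.crt ConfigMaps. The kubelet refuses to assemble the projection until its object manager has registered every source for the pod, and it aggregates all missing sources into one error rather than stopping at the first, which is exactly the shape of the projected.go messages above. A rough illustration of that aggregation; the registry map and function names are invented for the sketch, not the kubelet's real API:

package main

import (
	"errors"
	"fmt"
)

// registered stands in for the kubelet's per-pod object cache; both
// ConfigMaps are missing here, as in the log. Invented for illustration.
var registered = map[string][]byte{}

// buildProjected gathers every source for a projected volume and, like the
// kubelet, reports all missing objects at once instead of failing fast.
func buildProjected(namespace string, sources []string) (map[string][]byte, error) {
	out := make(map[string][]byte)
	var errs []error
	for _, name := range sources {
		data, ok := registered[namespace+"/"+name]
		if !ok {
			errs = append(errs, fmt.Errorf("object %q/%q not registered", namespace, name))
			continue
		}
		out[name] = data
	}
	if len(errs) > 0 {
		return nil, errors.Join(errs...)
	}
	return out, nil
}

func main() {
	_, err := buildProjected("openshift-network-diagnostics",
		[]string{"kube-root-ca.crt", "openshift-service-ca.crt"})
	fmt.Println("Error preparing data for projected volume:", err)
}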
Jan 30 00:09:25 crc kubenswrapper[4885]: E0130 00:09:25.783963 4885 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 30 00:09:25 crc kubenswrapper[4885]: E0130 00:09:25.784063 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-30 00:09:57.784039723 +0000 UTC m=+84.375511471 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 30 00:09:25 crc kubenswrapper[4885]: E0130 00:09:25.784098 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 00:09:57.784084724 +0000 UTC m=+84.375556472 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
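The "No retries permitted until ... (durationBeforeRetry 32s)" records come from the kubelet's per-volume exponential backoff: each consecutive failure roughly doubles the wait from a ~500 ms initial duration up to a cap of about two minutes. Those constants are my recollection of the upstream defaults, not something this log states; under that assumption, a 32 s wait corresponds to the seventh straight failure:

package main

import (
	"fmt"
	"time"
)

// durationBeforeRetry doubles the previous wait, starting at ~500 ms and
// clamped at ~2 minutes; initial value, factor and cap mirror the upstream
// kubelet defaults as I recall them (an assumption, not read from this log).
func durationBeforeRetry(consecutiveFailures int) time.Duration {
	const (
		initial = 500 * time.Millisecond
		maxWait = 2*time.Minute + 2*time.Second
	)
	d := initial
	for i := 1; i < consecutiveFailures; i++ {
		d *= 2
		if d > maxWait {
			return maxWait
		}
	}
	return d
}

func main() {
	for n := 1; n <= 8; n++ {
		fmt.Printf("failure %d -> wait %s\n", n, durationBeforeRetry(n))
	}
	// failure 7 -> wait 32s, matching durationBeforeRetry 32s in the record.
}

The m=+84.375... suffix in the record is the kubelet's monotonic uptime, so the retry at 00:09:57 is simply now + 32 s.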
[... identical node status event records repeat ...]
Jan 30 00:09:25 crc kubenswrapper[4885]: I0130 00:09:25.884418 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 00:09:25 crc kubenswrapper[4885]: E0130 00:09:25.884606 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:09:57.884578157 +0000 UTC m=+84.476049905 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
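The TearDown failure is a registration problem rather than a storage one: CSI calls are resolved through the kubelet's table of node-registered drivers, and kubevirt.io.hostpath-provisioner has not re-registered since the restart, so even unmounting an already-attached volume fails until the driver's plugin-registration socket reappears. A toy version of that lookup; the registry type here is invented for illustration, not the kubelet's actual data structure:

package main

import (
	"fmt"
	"sync"
)

// driverRegistry is an invented stand-in for the kubelet's table of CSI
// drivers, which is populated through the plugin-registration socket.
type driverRegistry struct {
	mu      sync.RWMutex
	drivers map[string]string // driver name -> gRPC endpoint
}

func (r *driverRegistry) client(name string) (string, error) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	endpoint, ok := r.drivers[name]
	if !ok {
		return "", fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
	}
	return endpoint, nil
}

func main() {
	reg := &driverRegistry{drivers: map[string]string{}} // nothing registered yet
	if _, err := reg.client("kubevirt.io.hostpath-provisioner"); err != nil {
		fmt.Println("Unmounter.TearDownAt failed to get CSI client:", err)
	}
}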
[... identical node status event records repeat ...]
Jan 30 00:09:25 crc kubenswrapper[4885]: I0130 00:09:25.985849 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 00:09:25 crc kubenswrapper[4885]: I0130 00:09:25.985929 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 00:09:25 crc kubenswrapper[4885]: E0130 00:09:25.986120 4885 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 30 00:09:25 crc kubenswrapper[4885]: E0130 00:09:25.986207 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 00:09:57.986181678 +0000 UTC m=+84.577653456 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 30 00:09:25 crc kubenswrapper[4885]: E0130 00:09:25.986309 4885 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Jan 30 00:09:25 crc kubenswrapper[4885]: E0130 00:09:25.986351 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 00:09:57.986338862 +0000 UTC m=+84.577810640 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
[... identical node status event records repeat ...]
Jan 30 00:09:26 crc kubenswrapper[4885]: I0130 00:09:26.107947 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 18:55:37.079922785 +0000 UTC
[... identical node status event records repeat ...]
Jan 30 00:09:26 crc kubenswrapper[4885]: I0130 00:09:26.141939 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk"
Jan 30 00:09:26 crc kubenswrapper[4885]: I0130 00:09:26.142076 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 00:09:26 crc kubenswrapper[4885]: I0130 00:09:26.142147 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 00:09:26 crc kubenswrapper[4885]: I0130 00:09:26.142262 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 00:09:26 crc kubenswrapper[4885]: E0130 00:09:26.142278 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364"
Jan 30 00:09:26 crc kubenswrapper[4885]: E0130 00:09:26.142471 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 00:09:26 crc kubenswrapper[4885]: E0130 00:09:26.142532 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 00:09:26 crc kubenswrapper[4885]: E0130 00:09:26.142688 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
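Every "network is not ready" skip, and the NodeNotReady condition that dominates this log, reduces to one gate: the container runtime reports NetworkReady=false because no CNI configuration file exists yet in /etc/kubernetes/cni/net.d/, and the component that writes it (ovnkube-controller) is the container crash-looping above. A sketch of such a directory check, assuming the common convention that .conf, .conflist and .json entries count as CNI configuration:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether dir holds any CNI network configuration,
// mirroring the spirit of the runtime's readiness gate: no file, not ready.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil {
		fmt.Println("cannot read CNI config dir:", err)
		return
	}
	if !ok {
		fmt.Println("container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady")
		return
	}
	fmt.Println("NetworkReady=true")
}

Once ovnkube-controller stays up long enough to write its config, the condition flips to Ready and these repeating records stop.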
[... identical node status event records repeat at ~100 ms intervals ...]
Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.108988 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 05:30:04.959405965 +0000 UTC
Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.143418 4885 scope.go:117] "RemoveContainer" containerID="ffbfece66cc67b901f862713adbd65fca3c46774dc4e994a99dcce814759c59a"
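The RemoveContainer record is the kubelet clearing the dead ovnkube-controller instance (ffbfece6...) before the restart that appears a few lines below as ContainerStarted. The earlier waiting state "back-off 10s restarting failed container" is CrashLoopBackOff's doubling delay; the 10 s base and 5 m ceiling used below are the upstream defaults as I understand them, not values taken from this log:

package main

import (
	"fmt"
	"time"
)

// crashLoopDelay doubles the back-off per consecutive crash, starting at
// 10s and capped at 5m; base and cap are assumed upstream defaults, not
// values stated in this log.
func crashLoopDelay(restarts int) time.Duration {
	const (
		base     = 10 * time.Second
		maxDelay = 5 * time.Minute
	)
	d := base
	for i := 1; i < restarts; i++ {
		d *= 2
		if d > maxDelay {
			return maxDelay
		}
	}
	return d
}

func main() {
	for r := 1; r <= 6; r++ {
		fmt.Printf("restart %d -> back-off %s\n", r, crashLoopDelay(r))
	}
	// restart 1 -> back-off 10s, matching "back-off 10s restarting failed container".
}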
Has your network provider started?"} Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.371679 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.372099 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.372111 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.372129 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.372141 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:27Z","lastTransitionTime":"2026-01-30T00:09:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.475085 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.475169 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.475195 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.475224 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.475245 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:27Z","lastTransitionTime":"2026-01-30T00:09:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.553055 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hwpvs_147e5e96-db98-498f-b4a4-927d73cb5db5/ovnkube-controller/1.log" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.556503 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerStarted","Data":"0cf4b54e0f083169f45556dc42e2c9d37585f261b5d349b206bfd0b206a78988"} Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.557557 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.575147 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb004
16ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:27Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.577203 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.577246 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.577281 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.577299 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.577312 4885 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:27Z","lastTransitionTime":"2026-01-30T00:09:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.604378 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:27Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.621044 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:27Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.649313 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.1
26.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:27Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.675910 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kub
e-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:27Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.679599 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.679653 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.679667 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.679688 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.679702 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:27Z","lastTransitionTime":"2026-01-30T00:09:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.697541 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76b3c14d-318a-4269-a669-be91a3a30425\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75325616731f47d3b1ac4f319f5190066c26fd2b2a2b20360bf25bfba831ff21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0896487baf92f89c8128ccac2ecc8ababb15c58c7cfe31432de7fe9095236e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://87b921c6d7b3ea99ae7c08fdc3e0a2b5ae0ec300c5ca5f0be5fb8de83d620537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:27Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.714514 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:27Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.727985 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:27Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.739560 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hg2nk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313f7566-bae9-4b9c-8c30-9e3c7aef8364\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hg2nk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:27Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.759728 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b238707103
5bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:27Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.771703 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:27Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.782107 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.782147 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.782160 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.782179 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.782195 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:27Z","lastTransitionTime":"2026-01-30T00:09:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.784149 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:27Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.818617 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20bcaeae22fbe2c1ed3aae71f5a6ef7ed50b90d1485e6a793bafd4e72ab0453\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e676f6c9a7795d8f3a7c8ec94db7201104c58cb83d213dc3f5bdcd47ca803b4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pdvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:27Z is after 2025-08-24T17:21:41Z" Jan 30 
00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.834474 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:27Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.847106 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:27Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.862860 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:27Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.885005 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.885074 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.885098 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.885133 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.885155 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:27Z","lastTransitionTime":"2026-01-30T00:09:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.887558 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cf4b54e0f083169f45556dc42e2c9d37585f261
b5d349b206bfd0b206a78988\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbfece66cc67b901f862713adbd65fca3c46774dc4e994a99dcce814759c59a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:11Z\\\",\\\"message\\\":\\\" 2.209008ms\\\\nI0130 00:09:11.434246 6316 services_controller.go:356] Processing sync for service openshift-marketplace/community-operators for network=default\\\\nI0130 00:09:11.434238 6316 services_controller.go:443] Built service openshift-machine-config-operator/machine-config-daemon LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.43\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9001, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.43\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:8798, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nF0130 00:09:11.434197 6316 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error 
occur\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"
containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:27Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.904101 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:27Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.988719 4885 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.988801 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.988816 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.988836 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:27 crc kubenswrapper[4885]: I0130 00:09:27.988849 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:27Z","lastTransitionTime":"2026-01-30T00:09:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.092137 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.092187 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.092202 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.092228 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.092243 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:28Z","lastTransitionTime":"2026-01-30T00:09:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.109429 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 08:36:22.01784769 +0000 UTC Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.141097 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.141184 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:28 crc kubenswrapper[4885]: E0130 00:09:28.141297 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 00:09:28 crc kubenswrapper[4885]: E0130 00:09:28.141445 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.141581 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 00:09:28 crc kubenswrapper[4885]: E0130 00:09:28.141694 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.141864 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 00:09:28 crc kubenswrapper[4885]: E0130 00:09:28.141964 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.194684 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.194731 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.194741 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.194758 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.194786 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:28Z","lastTransitionTime":"2026-01-30T00:09:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.299080 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.299166 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.299192 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.299225 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.299257 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:28Z","lastTransitionTime":"2026-01-30T00:09:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.401619 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.401698 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.401715 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.401734 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.401747 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:28Z","lastTransitionTime":"2026-01-30T00:09:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.505175 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.505227 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.505238 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.505256 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.505271 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:28Z","lastTransitionTime":"2026-01-30T00:09:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.562851 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hwpvs_147e5e96-db98-498f-b4a4-927d73cb5db5/ovnkube-controller/2.log"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.563917 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hwpvs_147e5e96-db98-498f-b4a4-927d73cb5db5/ovnkube-controller/1.log"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.568110 4885 generic.go:334] "Generic (PLEG): container finished" podID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerID="0cf4b54e0f083169f45556dc42e2c9d37585f261b5d349b206bfd0b206a78988" exitCode=1
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.568167 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerDied","Data":"0cf4b54e0f083169f45556dc42e2c9d37585f261b5d349b206bfd0b206a78988"}
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.568226 4885 scope.go:117] "RemoveContainer" containerID="ffbfece66cc67b901f862713adbd65fca3c46774dc4e994a99dcce814759c59a"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.570013 4885 scope.go:117] "RemoveContainer" containerID="0cf4b54e0f083169f45556dc42e2c9d37585f261b5d349b206bfd0b206a78988"
Jan 30 00:09:28 crc kubenswrapper[4885]: E0130 00:09:28.570365 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5"
Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.591438 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:28Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.608482 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.608526 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.608538 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.608556 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.608571 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:28Z","lastTransitionTime":"2026-01-30T00:09:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.613337 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:28Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.630979 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76b3c14d-318a-4269-a669-be91a3a30425\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75325616731f47d3b1ac4f319f5190066c26fd2b2a2b20360bf25bfba831ff21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0896487baf92f89c8128ccac2ecc8ababb15c58c7cfe31432de7fe9095236e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://87b921c6d7b3ea99ae7c08fdc3e0a2b5ae0ec300c5ca5f0be5fb8de83d620537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:28Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.650126 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:28Z is after 
2025-08-24T17:21:41Z" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.665587 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:28Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.683089 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20bcaeae22fbe2c1ed3aae71f5a6ef7ed50b90d1485e6a793bafd4e72ab0453\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e676f6c9a7795d8f3a7c8ec94db7201104c58cb83d213dc3f5bdcd47ca803b4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pdvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:28Z is after 2025-08-24T17:21:41Z" Jan 30 
00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.703842 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hg2nk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313f7566-bae9-4b9c-8c30-9e3c7aef8364\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hg2nk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:28Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.711353 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.711398 4885 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.711409 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.711428 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.711440 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:28Z","lastTransitionTime":"2026-01-30T00:09:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.724690 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b2387071035bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"r
eason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:28Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.740338 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:28Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.759818 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cf4b54e0f083169f45556dc42e2c9d37585f261
b5d349b206bfd0b206a78988\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ffbfece66cc67b901f862713adbd65fca3c46774dc4e994a99dcce814759c59a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:11Z\\\",\\\"message\\\":\\\" 2.209008ms\\\\nI0130 00:09:11.434246 6316 services_controller.go:356] Processing sync for service openshift-marketplace/community-operators for network=default\\\\nI0130 00:09:11.434238 6316 services_controller.go:443] Built service openshift-machine-config-operator/machine-config-daemon LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.43\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9001, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.43\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:8798, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nF0130 00:09:11.434197 6316 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occur\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cf4b54e0f083169f45556dc42e2c9d37585f261b5d349b206bfd0b206a78988\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:28Z\\\",\\\"message\\\":\\\" for *v1.Pod openshift-machine-config-operator/machine-config-daemon-bmd5j after 0 failed attempt(s)\\\\nI0130 00:09:28.122154 6511 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-bmd5j\\\\nI0130 00:09:28.122141 6511 lb_config.go:1031] Cluster endpoints for openshift-marketplace/redhat-operators for network=default are: map[]\\\\nI0130 00:09:28.121751 6511 services_controller.go:434] Service openshift-machine-config-operator/machine-config-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-config-operator openshift-machine-config-operator 8bc1afc2-8724-4135-84df-aee09f23af4c 4514 0 2025-02-23 05:12:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-operator] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mco-proxy-tls 
service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00796b2fb \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Na\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"ho
stIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:28Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.772218 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:28Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.788059 4885 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:28Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.804196 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:28Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.815226 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.815328 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.815343 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.815365 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.815401 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:28Z","lastTransitionTime":"2026-01-30T00:09:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.823564 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:28Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.846627 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":
{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:28Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.868737 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\
\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:28Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.889434 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:28Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.906657 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:28Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.919290 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.919351 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.919368 4885 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.919396 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:28 crc kubenswrapper[4885]: I0130 00:09:28.919415 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:28Z","lastTransitionTime":"2026-01-30T00:09:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.022242 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.022308 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.022326 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.022353 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.022372 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:29Z","lastTransitionTime":"2026-01-30T00:09:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.110469 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 10:53:25.423565899 +0000 UTC Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.125091 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.125152 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.125165 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.125186 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.125201 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:29Z","lastTransitionTime":"2026-01-30T00:09:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.229298 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.229441 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.229464 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.229490 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.229509 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:29Z","lastTransitionTime":"2026-01-30T00:09:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.332053 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.332121 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.332139 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.332171 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.332191 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:29Z","lastTransitionTime":"2026-01-30T00:09:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.436738 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.436830 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.436854 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.436883 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.436901 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:29Z","lastTransitionTime":"2026-01-30T00:09:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.540624 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.540743 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.540828 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.540873 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.540899 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:29Z","lastTransitionTime":"2026-01-30T00:09:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.575430 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hwpvs_147e5e96-db98-498f-b4a4-927d73cb5db5/ovnkube-controller/2.log" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.582701 4885 scope.go:117] "RemoveContainer" containerID="0cf4b54e0f083169f45556dc42e2c9d37585f261b5d349b206bfd0b206a78988" Jan 30 00:09:29 crc kubenswrapper[4885]: E0130 00:09:29.583111 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.600247 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20bcaeae22fbe2c1ed3aae71f5a6ef7ed50b90d1485e6a793bafd4e72ab0453\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e676f6c9a7795d8f3a7c8ec94db7201104c58cb83d213dc3f5bdcd47ca803b4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pdvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:29Z is after 2025-08-24T17:21:41Z" Jan 30 
00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.616916 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hg2nk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313f7566-bae9-4b9c-8c30-9e3c7aef8364\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hg2nk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:29Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.641652 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b238707103
5bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:29Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.644123 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.644301 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.644324 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.644361 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.644386 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:29Z","lastTransitionTime":"2026-01-30T00:09:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.657997 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:29Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.674530 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:29Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.690958 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:29Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.713209 4885 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:29Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.730441 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:29Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.741327 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs\") pod \"network-metrics-daemon-hg2nk\" (UID: \"313f7566-bae9-4b9c-8c30-9e3c7aef8364\") " pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:29 crc kubenswrapper[4885]: E0130 00:09:29.741625 4885 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 00:09:29 crc kubenswrapper[4885]: E0130 00:09:29.741743 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs podName:313f7566-bae9-4b9c-8c30-9e3c7aef8364 nodeName:}" failed. No retries permitted until 2026-01-30 00:09:45.741714212 +0000 UTC m=+72.333186000 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs") pod "network-metrics-daemon-hg2nk" (UID: "313f7566-bae9-4b9c-8c30-9e3c7aef8364") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.747494 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.747527 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.747541 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.747559 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.747574 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:29Z","lastTransitionTime":"2026-01-30T00:09:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.750716 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"mul
tus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:29Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.785661 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cf4b54e0f083169f45556dc42e2c9d37585f261
b5d349b206bfd0b206a78988\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cf4b54e0f083169f45556dc42e2c9d37585f261b5d349b206bfd0b206a78988\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:28Z\\\",\\\"message\\\":\\\" for *v1.Pod openshift-machine-config-operator/machine-config-daemon-bmd5j after 0 failed attempt(s)\\\\nI0130 00:09:28.122154 6511 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-bmd5j\\\\nI0130 00:09:28.122141 6511 lb_config.go:1031] Cluster endpoints for openshift-marketplace/redhat-operators for network=default are: map[]\\\\nI0130 00:09:28.121751 6511 services_controller.go:434] Service openshift-machine-config-operator/machine-config-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-config-operator openshift-machine-config-operator 8bc1afc2-8724-4135-84df-aee09f23af4c 4514 0 2025-02-23 05:12:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-operator] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mco-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00796b2fb \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Na\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:29Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.808743 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:29Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.828907 4885 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:29Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.848473 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:29Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.850392 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.850458 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.850472 4885 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.850497 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.850511 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:29Z","lastTransitionTime":"2026-01-30T00:09:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.873915 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d
0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Runnin
g\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:29Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.890462 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io
/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:29Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.909365 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76b3c14d-318a-4269-a669-be91a3a30425\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75325616731f47d3b1ac4f319f5190066c26fd2b2a2b20360bf25bfba831ff21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0896487baf92f89c8128ccac2ecc8ababb15c58c7cfe31432de7fe9095236e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://87b921c6d7b3ea99ae7c08fdc3e0a2b5ae0ec300c5ca5f0be5fb8de83d620537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:29Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.934145 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:29Z is after 
2025-08-24T17:21:41Z" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.953274 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.953371 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.953387 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.953406 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.953419 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:29Z","lastTransitionTime":"2026-01-30T00:09:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:29 crc kubenswrapper[4885]: I0130 00:09:29.955977 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:29Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 
00:09:30.056510 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.056577 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.056595 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.056621 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.056639 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:30Z","lastTransitionTime":"2026-01-30T00:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.111705 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 00:14:27.762392575 +0000 UTC Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.140975 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.141033 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.141140 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:30 crc kubenswrapper[4885]: E0130 00:09:30.141313 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.141553 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:30 crc kubenswrapper[4885]: E0130 00:09:30.141630 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:09:30 crc kubenswrapper[4885]: E0130 00:09:30.141825 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:09:30 crc kubenswrapper[4885]: E0130 00:09:30.141983 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.159095 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.159166 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.159186 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.159210 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.159228 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:30Z","lastTransitionTime":"2026-01-30T00:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.262206 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.262248 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.262265 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.262286 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.262302 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:30Z","lastTransitionTime":"2026-01-30T00:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.365662 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.365728 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.365745 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.365802 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.365821 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:30Z","lastTransitionTime":"2026-01-30T00:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.470929 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.470984 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.470999 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.471017 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.471030 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:30Z","lastTransitionTime":"2026-01-30T00:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.575687 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.575799 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.575821 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.575850 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.575873 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:30Z","lastTransitionTime":"2026-01-30T00:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.680044 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.680140 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.680167 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.680205 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.680234 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:30Z","lastTransitionTime":"2026-01-30T00:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.783638 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.783833 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.783862 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.783902 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.783931 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:30Z","lastTransitionTime":"2026-01-30T00:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.787137 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.787264 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.787292 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.787321 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.787342 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:30Z","lastTransitionTime":"2026-01-30T00:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:30 crc kubenswrapper[4885]: E0130 00:09:30.808809 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:30Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.814411 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.814476 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.814500 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.814526 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.814545 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:30Z","lastTransitionTime":"2026-01-30T00:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:30 crc kubenswrapper[4885]: E0130 00:09:30.831976 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:30Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.837652 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.837715 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.837734 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.837759 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.837803 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:30Z","lastTransitionTime":"2026-01-30T00:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:30 crc kubenswrapper[4885]: E0130 00:09:30.868584 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:30Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.875366 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.875458 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.875487 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.875522 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.875547 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:30Z","lastTransitionTime":"2026-01-30T00:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:30 crc kubenswrapper[4885]: E0130 00:09:30.898291 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:30Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.905485 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.905555 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.905672 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.905705 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.905807 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:30Z","lastTransitionTime":"2026-01-30T00:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:30 crc kubenswrapper[4885]: E0130 00:09:30.932040 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:30Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:30 crc kubenswrapper[4885]: E0130 00:09:30.932478 4885 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.935572 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.935631 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.935657 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.935689 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:30 crc kubenswrapper[4885]: I0130 00:09:30.935751 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:30Z","lastTransitionTime":"2026-01-30T00:09:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.038905 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.039013 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.039035 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.039066 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.039084 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:31Z","lastTransitionTime":"2026-01-30T00:09:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.112220 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 22:03:33.896716462 +0000 UTC Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.143221 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.143267 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.143278 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.143294 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.143308 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:31Z","lastTransitionTime":"2026-01-30T00:09:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.246509 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.246562 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.246573 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.246590 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.246602 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:31Z","lastTransitionTime":"2026-01-30T00:09:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.349824 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.349867 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.349882 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.349923 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.349935 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:31Z","lastTransitionTime":"2026-01-30T00:09:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.456095 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.456203 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.456229 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.456263 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.456289 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:31Z","lastTransitionTime":"2026-01-30T00:09:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.560013 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.560051 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.560063 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.560081 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.560092 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:31Z","lastTransitionTime":"2026-01-30T00:09:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.664420 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.664493 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.664512 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.664541 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.664561 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:31Z","lastTransitionTime":"2026-01-30T00:09:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.767915 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.767987 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.768007 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.768034 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.768055 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:31Z","lastTransitionTime":"2026-01-30T00:09:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.871317 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.871388 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.871405 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.871437 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.871457 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:31Z","lastTransitionTime":"2026-01-30T00:09:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.975018 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.975079 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.975096 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.975119 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:31 crc kubenswrapper[4885]: I0130 00:09:31.975137 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:31Z","lastTransitionTime":"2026-01-30T00:09:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.078498 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.078594 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.078636 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.078673 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.078698 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:32Z","lastTransitionTime":"2026-01-30T00:09:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.112374 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 06:41:10.387660726 +0000 UTC Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.141820 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.141972 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:09:32 crc kubenswrapper[4885]: E0130 00:09:32.142087 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.142183 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.142645 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:32 crc kubenswrapper[4885]: E0130 00:09:32.142622 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:09:32 crc kubenswrapper[4885]: E0130 00:09:32.142865 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:09:32 crc kubenswrapper[4885]: E0130 00:09:32.143067 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.181763 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.181820 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.181830 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.181848 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.181861 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:32Z","lastTransitionTime":"2026-01-30T00:09:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.285405 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.285450 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.285465 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.285487 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.285500 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:32Z","lastTransitionTime":"2026-01-30T00:09:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.389158 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.389236 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.389247 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.389271 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.389285 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:32Z","lastTransitionTime":"2026-01-30T00:09:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.493059 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.493109 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.493119 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.493138 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.493151 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:32Z","lastTransitionTime":"2026-01-30T00:09:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.595458 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.595549 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.595581 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.595612 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.595631 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:32Z","lastTransitionTime":"2026-01-30T00:09:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.698471 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.698520 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.698531 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.698551 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.698563 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:32Z","lastTransitionTime":"2026-01-30T00:09:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.802022 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.802085 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.802096 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.802112 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.802125 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:32Z","lastTransitionTime":"2026-01-30T00:09:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.905389 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.905479 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.905501 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.905534 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:32 crc kubenswrapper[4885]: I0130 00:09:32.905557 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:32Z","lastTransitionTime":"2026-01-30T00:09:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.008687 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.008744 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.008758 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.008796 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.008821 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:33Z","lastTransitionTime":"2026-01-30T00:09:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.112525 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 13:38:58.529215133 +0000 UTC Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.112733 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.112822 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.112840 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.112865 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.112883 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:33Z","lastTransitionTime":"2026-01-30T00:09:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.216399 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.216476 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.216496 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.216523 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.216544 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:33Z","lastTransitionTime":"2026-01-30T00:09:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.319733 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.319820 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.319842 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.319869 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.319887 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:33Z","lastTransitionTime":"2026-01-30T00:09:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.423481 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.423547 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.423563 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.423582 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.423598 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:33Z","lastTransitionTime":"2026-01-30T00:09:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.528110 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.528196 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.528222 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.528256 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.528280 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:33Z","lastTransitionTime":"2026-01-30T00:09:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.638097 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.638166 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.638179 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.638196 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.638208 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:33Z","lastTransitionTime":"2026-01-30T00:09:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.742542 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.742613 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.742627 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.742652 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.742668 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:33Z","lastTransitionTime":"2026-01-30T00:09:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.846529 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.846595 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.846616 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.846645 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.846673 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:33Z","lastTransitionTime":"2026-01-30T00:09:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.950269 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.950329 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.950342 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.950364 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:33 crc kubenswrapper[4885]: I0130 00:09:33.950378 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:33Z","lastTransitionTime":"2026-01-30T00:09:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.052644 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.052704 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.052724 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.052752 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.052795 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:34Z","lastTransitionTime":"2026-01-30T00:09:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.113026 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 15:04:57.148029859 +0000 UTC Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.140869 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.140938 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.140874 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:34 crc kubenswrapper[4885]: E0130 00:09:34.141189 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.141211 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:09:34 crc kubenswrapper[4885]: E0130 00:09:34.141057 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:09:34 crc kubenswrapper[4885]: E0130 00:09:34.141429 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:09:34 crc kubenswrapper[4885]: E0130 00:09:34.141524 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.155828 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.155883 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.155896 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.155916 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.155935 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:34Z","lastTransitionTime":"2026-01-30T00:09:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.175418 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b2387071035bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:34Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.192130 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.204652 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:34Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.217376 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20bcaeae22fbe2c1ed3aae71f5a6ef7ed50b90d1485e6a793bafd4e72ab0453\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e676f6c9a7795d8f3a7c8ec94db7201104c58cb83d213dc3f5bdcd47ca803b4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pdvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:34Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.227638 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hg2nk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313f7566-bae9-4b9c-8c30-9e3c7aef8364\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hg2nk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:34Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.238578 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:34Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.249170 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:34Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.257999 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.258024 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.258031 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.258044 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.258053 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:34Z","lastTransitionTime":"2026-01-30T00:09:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.262022 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:34Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.280587 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cf4b54e0f083169f45556dc42e2c9d37585f261b5d349b206bfd0b206a78988\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cf4b54e0f083169f45556dc42e2c9d37585f261b5d349b206bfd0b206a78988\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:28Z\\\",\\\"message\\\":\\\" for *v1.Pod openshift-machine-config-operator/machine-config-daemon-bmd5j after 0 failed attempt(s)\\\\nI0130 00:09:28.122154 6511 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-bmd5j\\\\nI0130 00:09:28.122141 6511 lb_config.go:1031] Cluster endpoints for openshift-marketplace/redhat-operators for network=default are: map[]\\\\nI0130 00:09:28.121751 6511 services_controller.go:434] Service openshift-machine-config-operator/machine-config-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-config-operator openshift-machine-config-operator 8bc1afc2-8724-4135-84df-aee09f23af4c 4514 0 2025-02-23 05:12:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-operator] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mco-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00796b2fb \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Na\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":
\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:34Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.293608 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:34Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.310296 4885 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433
c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:34Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.325452 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:34Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.338806 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:34Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.357100 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:34Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.361118 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.361210 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:34 crc 
kubenswrapper[4885]: I0130 00:09:34.361226 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.361257 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.361274 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:34Z","lastTransitionTime":"2026-01-30T00:09:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.371006 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"con
tainerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:34Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.383482 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76b3c14d-318a-4269-a669-be91a3a30425\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75325616731f47d3b1ac4f319f5190066c26fd2b2a2b20360bf25bfba831ff21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0896487baf92f89c8128ccac2ecc8ababb15c58c7cfe31432de7fe9095236e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://87b921c6d7b3ea99ae7c08fdc3e0a2b5ae0ec300c5ca5f0be5fb8de83d620537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:34Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.398936 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:34Z is after 
2025-08-24T17:21:41Z" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.413890 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:34Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.466279 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.466336 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.466356 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.466380 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.466398 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:34Z","lastTransitionTime":"2026-01-30T00:09:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.569305 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.569396 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.569419 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.569444 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.569461 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:34Z","lastTransitionTime":"2026-01-30T00:09:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.672070 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.672423 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.672519 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.672611 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.672693 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:34Z","lastTransitionTime":"2026-01-30T00:09:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.776827 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.776904 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.776921 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.776947 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.776966 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:34Z","lastTransitionTime":"2026-01-30T00:09:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.882538 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.883389 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.883651 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.883819 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.883916 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:34Z","lastTransitionTime":"2026-01-30T00:09:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.989122 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.989210 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.989228 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.989260 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:34 crc kubenswrapper[4885]: I0130 00:09:34.989280 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:34Z","lastTransitionTime":"2026-01-30T00:09:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.093610 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.093688 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.093716 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.093746 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.093763 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:35Z","lastTransitionTime":"2026-01-30T00:09:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.113467 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 09:35:10.891009233 +0000 UTC Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.197262 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.197309 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.197319 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.197337 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.197349 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:35Z","lastTransitionTime":"2026-01-30T00:09:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.301327 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.301399 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.301411 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.301432 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.301445 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:35Z","lastTransitionTime":"2026-01-30T00:09:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.405253 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.405316 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.405325 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.405344 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.405355 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:35Z","lastTransitionTime":"2026-01-30T00:09:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.508958 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.509011 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.509028 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.509049 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.509065 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:35Z","lastTransitionTime":"2026-01-30T00:09:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.611647 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.611731 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.611751 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.611812 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.611841 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:35Z","lastTransitionTime":"2026-01-30T00:09:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.721486 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.721562 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.721584 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.721611 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.721628 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:35Z","lastTransitionTime":"2026-01-30T00:09:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.828418 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.828492 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.828509 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.828535 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.828555 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:35Z","lastTransitionTime":"2026-01-30T00:09:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.931352 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.931412 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.931431 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.931457 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:35 crc kubenswrapper[4885]: I0130 00:09:35.931476 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:35Z","lastTransitionTime":"2026-01-30T00:09:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.034807 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.034874 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.034894 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.034922 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.034944 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:36Z","lastTransitionTime":"2026-01-30T00:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.113711 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 23:31:05.789023535 +0000 UTC Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.138382 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.138438 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.138460 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.138482 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.138498 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:36Z","lastTransitionTime":"2026-01-30T00:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.141754 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.141856 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.141914 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:09:36 crc kubenswrapper[4885]: E0130 00:09:36.142074 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:09:36 crc kubenswrapper[4885]: E0130 00:09:36.142267 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.142457 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:36 crc kubenswrapper[4885]: E0130 00:09:36.142461 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:09:36 crc kubenswrapper[4885]: E0130 00:09:36.143022 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.242562 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.242637 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.242654 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.242685 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.242706 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:36Z","lastTransitionTime":"2026-01-30T00:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.346251 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.346326 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.346349 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.346376 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.346397 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:36Z","lastTransitionTime":"2026-01-30T00:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.451501 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.451545 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.451557 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.451575 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.451589 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:36Z","lastTransitionTime":"2026-01-30T00:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.554555 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.554883 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.554980 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.555070 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.555470 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:36Z","lastTransitionTime":"2026-01-30T00:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.658808 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.658860 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.658878 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.658901 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.658920 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:36Z","lastTransitionTime":"2026-01-30T00:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.761830 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.761883 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.761899 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.761924 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.761942 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:36Z","lastTransitionTime":"2026-01-30T00:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.865155 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.865205 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.865221 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.865244 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.865294 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:36Z","lastTransitionTime":"2026-01-30T00:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.968709 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.968786 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.968797 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.968817 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:36 crc kubenswrapper[4885]: I0130 00:09:36.968827 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:36Z","lastTransitionTime":"2026-01-30T00:09:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.072515 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.072587 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.072598 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.072617 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.072629 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:37Z","lastTransitionTime":"2026-01-30T00:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.113983 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-06 23:12:19.788548686 +0000 UTC Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.176264 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.176322 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.176335 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.176357 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.176376 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:37Z","lastTransitionTime":"2026-01-30T00:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.279830 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.279906 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.279923 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.279951 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.279967 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:37Z","lastTransitionTime":"2026-01-30T00:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.382714 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.383221 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.383387 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.383470 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.383530 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:37Z","lastTransitionTime":"2026-01-30T00:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.487706 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.487859 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.487889 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.487929 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.487955 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:37Z","lastTransitionTime":"2026-01-30T00:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.591151 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.591244 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.591265 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.591298 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.591318 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:37Z","lastTransitionTime":"2026-01-30T00:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.695244 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.695305 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.695323 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.695349 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.695370 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:37Z","lastTransitionTime":"2026-01-30T00:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.797952 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.798033 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.798048 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.798068 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.798081 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:37Z","lastTransitionTime":"2026-01-30T00:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.902810 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.903297 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.903387 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.903494 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:37 crc kubenswrapper[4885]: I0130 00:09:37.903587 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:37Z","lastTransitionTime":"2026-01-30T00:09:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.007188 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.007262 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.007285 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.007317 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.007342 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:38Z","lastTransitionTime":"2026-01-30T00:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.111579 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.111642 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.111653 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.111673 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.111687 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:38Z","lastTransitionTime":"2026-01-30T00:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.114996 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 12:54:11.530904036 +0000 UTC Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.141487 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.141528 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.141537 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:38 crc kubenswrapper[4885]: E0130 00:09:38.141644 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.141834 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:09:38 crc kubenswrapper[4885]: E0130 00:09:38.141865 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:09:38 crc kubenswrapper[4885]: E0130 00:09:38.141889 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:09:38 crc kubenswrapper[4885]: E0130 00:09:38.142029 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.215651 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.215706 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.215732 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.215757 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.215789 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:38Z","lastTransitionTime":"2026-01-30T00:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.318167 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.318238 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.318258 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.318287 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.318307 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:38Z","lastTransitionTime":"2026-01-30T00:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.421469 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.421530 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.421547 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.421573 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.421590 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:38Z","lastTransitionTime":"2026-01-30T00:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.524356 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.524419 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.524437 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.524462 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.524479 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:38Z","lastTransitionTime":"2026-01-30T00:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.628389 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.628449 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.628462 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.628481 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.628499 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:38Z","lastTransitionTime":"2026-01-30T00:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.731361 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.731409 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.731419 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.731437 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.731450 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:38Z","lastTransitionTime":"2026-01-30T00:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.833742 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.833827 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.833847 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.833872 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.833892 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:38Z","lastTransitionTime":"2026-01-30T00:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.936671 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.936735 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.936754 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.936803 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:38 crc kubenswrapper[4885]: I0130 00:09:38.936823 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:38Z","lastTransitionTime":"2026-01-30T00:09:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.040174 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.040242 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.040255 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.040274 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.040288 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:39Z","lastTransitionTime":"2026-01-30T00:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.115599 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 10:23:12.624184326 +0000 UTC Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.143384 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.143451 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.143473 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.143494 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.143512 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:39Z","lastTransitionTime":"2026-01-30T00:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.246090 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.246150 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.246162 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.246190 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.246202 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:39Z","lastTransitionTime":"2026-01-30T00:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.349167 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.349206 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.349217 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.349230 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.349241 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:39Z","lastTransitionTime":"2026-01-30T00:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.452826 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.452875 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.452884 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.452900 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.452911 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:39Z","lastTransitionTime":"2026-01-30T00:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.555633 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.555687 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.555697 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.555716 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.555726 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:39Z","lastTransitionTime":"2026-01-30T00:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.658414 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.658482 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.658501 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.658532 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.658548 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:39Z","lastTransitionTime":"2026-01-30T00:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.760744 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.760802 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.760812 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.760829 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.760839 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:39Z","lastTransitionTime":"2026-01-30T00:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.863441 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.863492 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.863501 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.863517 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.863530 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:39Z","lastTransitionTime":"2026-01-30T00:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.966746 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.966870 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.966894 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.966926 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:39 crc kubenswrapper[4885]: I0130 00:09:39.966948 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:39Z","lastTransitionTime":"2026-01-30T00:09:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.072427 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.072512 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.072534 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.072566 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.072585 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:40Z","lastTransitionTime":"2026-01-30T00:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.115940 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 22:26:30.372480477 +0000 UTC
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.141928 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.141972 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.142016 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
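The certificate_manager.go:356 lines above log a fixed expiration (2026-02-24 05:53:03 UTC) but a rotation deadline that differs between the two samples (2025-11-16, then 2025-11-22). That is consistent with client-go's certificate manager picking a jittered deadline somewhere around 70-90% of the certificate's validity window each time it evaluates rotation; the sketch below shows that computation under that assumption, with a one-year validity window assumed for illustration since the log only shows the expiration time:

// Sketch of a jittered rotation deadline like the ones logged by
// certificate_manager.go. Assumption: a random point in roughly the
// 70-90% window of the cert's validity, modeled on the behavior of
// k8s.io/client-go/util/certificate; this is an illustration, not
// the actual implementation.
package main

import (
	"fmt"
	"math/rand"
	"time"
)

func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	// Random point in [0.7, 0.9) of the total validity period.
	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	// Assumed issuance one year before the logged expiration.
	notBefore := time.Date(2025, time.February, 24, 5, 53, 3, 0, time.UTC)
	notAfter := time.Date(2026, time.February, 24, 5, 53, 3, 0, time.UTC)
	fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter))
}

Under that assumed one-year window, both logged deadlines fall inside the 70-90% range, and both are already in the past at the log's clock of 2026-01-30, so rotation is overdue when these lines are emitted.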
Jan 30 00:09:40 crc kubenswrapper[4885]: E0130 00:09:40.142093 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.142253 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 00:09:40 crc kubenswrapper[4885]: E0130 00:09:40.142585 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 00:09:40 crc kubenswrapper[4885]: E0130 00:09:40.142700 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 00:09:40 crc kubenswrapper[4885]: E0130 00:09:40.142874 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.176296 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.176372 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.176391 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.176422 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.176441 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:40Z","lastTransitionTime":"2026-01-30T00:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.279222 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.279281 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.279292 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.279311 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.279333 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:40Z","lastTransitionTime":"2026-01-30T00:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.382418 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.382487 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.382505 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.382539 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.382558 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:40Z","lastTransitionTime":"2026-01-30T00:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.485648 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.485728 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.485742 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.485796 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.485813 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:40Z","lastTransitionTime":"2026-01-30T00:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.588701 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.588754 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.588791 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.588817 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.588835 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:40Z","lastTransitionTime":"2026-01-30T00:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.692568 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.692640 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.692660 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.692689 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.692711 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:40Z","lastTransitionTime":"2026-01-30T00:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.795984 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.796037 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.796047 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.796069 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.796085 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:40Z","lastTransitionTime":"2026-01-30T00:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.899331 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.899401 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.899418 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.899441 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:40 crc kubenswrapper[4885]: I0130 00:09:40.899461 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:40Z","lastTransitionTime":"2026-01-30T00:09:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.001719 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.001800 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.001811 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.001840 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.001852 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:41Z","lastTransitionTime":"2026-01-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.018999 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.019053 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.019072 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.019096 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.019114 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:41Z","lastTransitionTime":"2026-01-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
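Every NotReady heartbeat and every "Error syncing pod" entry above traces back to the same message: no CNI configuration file in /etc/kubernetes/cni/net.d/. The runtime keeps reporting NetworkReady=false until a network plugin writes a configuration into that directory. A minimal stand-in for that existence check (the real lookup lives in the CNI/libcni code used by the runtime, not in this exact form):

// Sketch: does the CNI conf directory contain any usable config?
// Illustrative approximation of the check behind the
// "no CNI configuration file" message.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		// Extensions conventionally accepted for CNI configs.
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	fmt.Println(ok, err)
}

On this node the directory is empty (or absent), so the check fails and the pod sandboxes for network-check-target, network-check-source, networking-console-plugin, and network-metrics-daemon cannot be created.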
Jan 30 00:09:41 crc kubenswrapper[4885]: E0130 00:09:41.039242 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:41Z is after 2025-08-24T17:21:41Z"
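The patch failure above is not a node-health problem at all: the serving certificate behind the node.network-node-identity.openshift.io webhook expired on 2025-08-24T17:21:41Z, months before the log's clock of 2026-01-30T00:09:41Z, so every node-status patch routed through the webhook fails TLS verification. The same validity-window check can be reproduced with the standard library (certPEM below is a placeholder, not this cluster's webhook certificate):

// Sketch: the "certificate has expired or is not yet valid"
// determination, using crypto/x509's NotBefore/NotAfter fields.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"time"
)

func checkValidity(certPEM []byte, now time.Time) error {
	block, _ := pem.Decode(certPEM)
	if block == nil {
		return fmt.Errorf("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		return err
	}
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		return fmt.Errorf("certificate has expired or is not yet valid: current time %s is not within [%s, %s]",
			now.Format(time.RFC3339),
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339))
	}
	return nil
}

func main() {
	// Placeholder input: with a cert whose NotAfter is
	// 2025-08-24T17:21:41Z and now set to the log's clock, this
	// check fails the same way the webhook client does.
	var certPEM []byte // supply a PEM-encoded certificate here
	now := time.Date(2026, time.January, 30, 0, 9, 41, 0, time.UTC)
	fmt.Println(checkValidity(certPEM, now))
}

The two retry entries that follow hit the identical expired-certificate error, about 20 ms apart, as the kubelet retries the status patch.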
2025-08-24T17:21:41Z" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.044141 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.044201 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.044221 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.044248 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.044270 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:41Z","lastTransitionTime":"2026-01-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:41 crc kubenswrapper[4885]: E0130 00:09:41.058163 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:41Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.062348 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.062396 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.062416 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.062438 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.062456 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:41Z","lastTransitionTime":"2026-01-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:41 crc kubenswrapper[4885]: E0130 00:09:41.078362 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:41Z is after 
2025-08-24T17:21:41Z" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.083477 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.083564 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.083587 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.083616 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.083637 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:41Z","lastTransitionTime":"2026-01-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:41 crc kubenswrapper[4885]: E0130 00:09:41.097483 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],[...images/nodeInfo/runtimeHandlers payload, byte-identical to the first retry above, elided...]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:41Z is after 
2025-08-24T17:21:41Z" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.102689 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.102731 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.102741 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.102756 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.102788 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:41Z","lastTransitionTime":"2026-01-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.116700 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 21:31:28.12782265 +0000 UTC Jan 30 00:09:41 crc kubenswrapper[4885]: E0130 00:09:41.119564 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],[...images/nodeInfo/runtimeHandlers payload, byte-identical to the first retry above, elided...]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:41Z is after 
2025-08-24T17:21:41Z" Jan 30 00:09:41 crc kubenswrapper[4885]: E0130 00:09:41.119678 4885 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.121405 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.121426 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.121435 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.121448 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.121459 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:41Z","lastTransitionTime":"2026-01-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.224874 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.224957 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.224984 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.225016 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.225039 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:41Z","lastTransitionTime":"2026-01-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.328282 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.328345 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.328365 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.328390 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.328413 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:41Z","lastTransitionTime":"2026-01-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.431165 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.431221 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.431230 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.431253 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.431265 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:41Z","lastTransitionTime":"2026-01-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.534338 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.534414 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.534432 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.534460 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.534480 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:41Z","lastTransitionTime":"2026-01-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.636909 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.636961 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.636972 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.636989 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.637004 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:41Z","lastTransitionTime":"2026-01-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.740504 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.740555 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.740566 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.740585 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.740598 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:41Z","lastTransitionTime":"2026-01-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.843892 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.843938 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.843949 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.843971 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.843984 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:41Z","lastTransitionTime":"2026-01-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.947318 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.947358 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.947369 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.947386 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:41 crc kubenswrapper[4885]: I0130 00:09:41.947398 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:41Z","lastTransitionTime":"2026-01-30T00:09:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.050636 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.050684 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.050696 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.050712 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.050723 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:42Z","lastTransitionTime":"2026-01-30T00:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.117077 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 12:04:00.597993357 +0000 UTC Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.140870 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.140965 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.140977 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.140944 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:42 crc kubenswrapper[4885]: E0130 00:09:42.141157 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:09:42 crc kubenswrapper[4885]: E0130 00:09:42.141254 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:09:42 crc kubenswrapper[4885]: E0130 00:09:42.141400 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:09:42 crc kubenswrapper[4885]: E0130 00:09:42.141505 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.153066 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.153096 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.153105 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.153120 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.153135 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:42Z","lastTransitionTime":"2026-01-30T00:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.255837 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.255891 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.255905 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.255924 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.255938 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:42Z","lastTransitionTime":"2026-01-30T00:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.358748 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.358877 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.358892 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.358921 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.358939 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:42Z","lastTransitionTime":"2026-01-30T00:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.461665 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.461714 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.461726 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.461744 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.461756 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:42Z","lastTransitionTime":"2026-01-30T00:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.564489 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.564542 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.564555 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.564575 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.564587 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:42Z","lastTransitionTime":"2026-01-30T00:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.666517 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.666559 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.666569 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.666587 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.666597 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:42Z","lastTransitionTime":"2026-01-30T00:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.769665 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.769733 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.769746 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.769781 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.769794 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:42Z","lastTransitionTime":"2026-01-30T00:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.879238 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.879295 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.879309 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.879328 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.879341 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:42Z","lastTransitionTime":"2026-01-30T00:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.982256 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.982296 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.982306 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.982322 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:42 crc kubenswrapper[4885]: I0130 00:09:42.982332 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:42Z","lastTransitionTime":"2026-01-30T00:09:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.085824 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.085894 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.085915 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.085945 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.085963 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:43Z","lastTransitionTime":"2026-01-30T00:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.117302 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 22:25:51.511442749 +0000 UTC Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.189183 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.189236 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.189245 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.189260 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.189269 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:43Z","lastTransitionTime":"2026-01-30T00:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.292007 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.292068 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.292089 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.292116 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.292135 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:43Z","lastTransitionTime":"2026-01-30T00:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.394965 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.395016 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.395028 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.395047 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.395063 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:43Z","lastTransitionTime":"2026-01-30T00:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.497817 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.497875 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.497889 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.497904 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.497912 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:43Z","lastTransitionTime":"2026-01-30T00:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.600024 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.600090 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.600100 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.600116 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.600127 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:43Z","lastTransitionTime":"2026-01-30T00:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.702509 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.702550 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.702563 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.702582 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.702596 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:43Z","lastTransitionTime":"2026-01-30T00:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.804728 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.804793 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.804803 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.804821 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.804833 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:43Z","lastTransitionTime":"2026-01-30T00:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.907333 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.907377 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.907386 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.907401 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:43 crc kubenswrapper[4885]: I0130 00:09:43.907410 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:43Z","lastTransitionTime":"2026-01-30T00:09:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.010317 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.010371 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.010429 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.010454 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.010469 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:44Z","lastTransitionTime":"2026-01-30T00:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.113644 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.113687 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.113696 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.113709 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.113724 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:44Z","lastTransitionTime":"2026-01-30T00:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.118090 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 15:14:00.270681115 +0000 UTC Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.140914 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.140934 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.140972 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:09:44 crc kubenswrapper[4885]: E0130 00:09:44.141041 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.141169 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:09:44 crc kubenswrapper[4885]: E0130 00:09:44.141342 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:09:44 crc kubenswrapper[4885]: E0130 00:09:44.141477 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:09:44 crc kubenswrapper[4885]: E0130 00:09:44.141597 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.143949 4885 scope.go:117] "RemoveContainer" containerID="0cf4b54e0f083169f45556dc42e2c9d37585f261b5d349b206bfd0b206a78988" Jan 30 00:09:44 crc kubenswrapper[4885]: E0130 00:09:44.144489 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.156429 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31ed
c73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:44Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.174548 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:44Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.188552 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:44Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.200172 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:44Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.212972 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:44Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.216105 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.216140 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.216153 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.216172 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.216184 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:44Z","lastTransitionTime":"2026-01-30T00:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.225530 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76b3c14d-318a-4269-a669-be91a3a30425\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75325616731f47d3b1ac4f319f5190066c26fd2b2a2b20360bf25bfba831ff21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0896487baf92f89c8128ccac2ecc8ababb15c58c7cfe31432de7fe9095236e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://87b921c6d7b3ea99ae7c08fdc3e0a2b5ae0ec300c5ca5f0be5fb8de83d620537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:44Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.241904 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:44Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.256722 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:44Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.284480 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b238707103
5bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:44Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.300974 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:44Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.312020 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:44Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.318113 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.318161 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.318171 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.318187 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.318197 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:44Z","lastTransitionTime":"2026-01-30T00:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.323717 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20bcaeae22fbe2c1ed3aae71f5a6ef7ed50b90d1485e6a793bafd4e72ab0453\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e676f6c9a7795d8f3a7c8ec94db7201104c58cb83d213dc3f5bdcd47
ca803b4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pdvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:44Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.334929 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hg2nk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313f7566-bae9-4b9c-8c30-9e3c7aef8364\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hg2nk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:44Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.345664 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:44Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.358438 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:44Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.373707 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:44Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.412411 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cf4b54e0f083169f45556dc42e2c9d37585f261
b5d349b206bfd0b206a78988\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cf4b54e0f083169f45556dc42e2c9d37585f261b5d349b206bfd0b206a78988\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:28Z\\\",\\\"message\\\":\\\" for *v1.Pod openshift-machine-config-operator/machine-config-daemon-bmd5j after 0 failed attempt(s)\\\\nI0130 00:09:28.122154 6511 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-bmd5j\\\\nI0130 00:09:28.122141 6511 lb_config.go:1031] Cluster endpoints for openshift-marketplace/redhat-operators for network=default are: map[]\\\\nI0130 00:09:28.121751 6511 services_controller.go:434] Service openshift-machine-config-operator/machine-config-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-config-operator openshift-machine-config-operator 8bc1afc2-8724-4135-84df-aee09f23af4c 4514 0 2025-02-23 05:12:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-operator] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mco-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00796b2fb \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Na\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:44Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.421304 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.421379 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.421394 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.421422 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.421439 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:44Z","lastTransitionTime":"2026-01-30T00:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.429317 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:44Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.524395 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.524449 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.524463 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.524485 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.524500 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:44Z","lastTransitionTime":"2026-01-30T00:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.628039 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.628090 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.628098 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.628116 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.628126 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:44Z","lastTransitionTime":"2026-01-30T00:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.730936 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.730993 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.731003 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.731019 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.731032 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:44Z","lastTransitionTime":"2026-01-30T00:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.834547 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.834614 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.834626 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.834643 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.834663 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:44Z","lastTransitionTime":"2026-01-30T00:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.938110 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.938190 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.938208 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.938238 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:44 crc kubenswrapper[4885]: I0130 00:09:44.938258 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:44Z","lastTransitionTime":"2026-01-30T00:09:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.040625 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.040688 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.040708 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.040732 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.040750 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:45Z","lastTransitionTime":"2026-01-30T00:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.118616 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 16:59:23.916845056 +0000 UTC Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.144257 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.144297 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.144307 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.144323 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.144334 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:45Z","lastTransitionTime":"2026-01-30T00:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.247735 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.247845 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.247865 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.247892 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.247909 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:45Z","lastTransitionTime":"2026-01-30T00:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.352237 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.352705 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.353336 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.353508 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.353677 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:45Z","lastTransitionTime":"2026-01-30T00:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.456962 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.457318 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.457523 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.457686 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.457864 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:45Z","lastTransitionTime":"2026-01-30T00:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.561201 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.561269 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.561281 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.561309 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.561322 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:45Z","lastTransitionTime":"2026-01-30T00:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.665501 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.665975 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.666086 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.666196 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.666278 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:45Z","lastTransitionTime":"2026-01-30T00:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.769702 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.769857 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.769919 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.769954 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.769974 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:45Z","lastTransitionTime":"2026-01-30T00:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.840469 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs\") pod \"network-metrics-daemon-hg2nk\" (UID: \"313f7566-bae9-4b9c-8c30-9e3c7aef8364\") " pod="openshift-multus/network-metrics-daemon-hg2nk"
Jan 30 00:09:45 crc kubenswrapper[4885]: E0130 00:09:45.840741 4885 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 30 00:09:45 crc kubenswrapper[4885]: E0130 00:09:45.840877 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs podName:313f7566-bae9-4b9c-8c30-9e3c7aef8364 nodeName:}" failed. No retries permitted until 2026-01-30 00:10:17.840852317 +0000 UTC m=+104.432324095 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs") pod "network-metrics-daemon-hg2nk" (UID: "313f7566-bae9-4b9c-8c30-9e3c7aef8364") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.873199 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.873244 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.873264 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.873282 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.873293 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:45Z","lastTransitionTime":"2026-01-30T00:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.976391 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.976751 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.976840 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.976910 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:45 crc kubenswrapper[4885]: I0130 00:09:45.976979 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:45Z","lastTransitionTime":"2026-01-30T00:09:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.080449 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.080817 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.080945 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.081026 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.081097 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:46Z","lastTransitionTime":"2026-01-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.119169 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 12:33:17.234829042 +0000 UTC
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.141735 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.141739 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.141869 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.141878 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk"
Jan 30 00:09:46 crc kubenswrapper[4885]: E0130 00:09:46.142042 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 00:09:46 crc kubenswrapper[4885]: E0130 00:09:46.142287 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 00:09:46 crc kubenswrapper[4885]: E0130 00:09:46.142429 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 00:09:46 crc kubenswrapper[4885]: E0130 00:09:46.142606 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.183547 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.183591 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.183603 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.183619 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.183634 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:46Z","lastTransitionTime":"2026-01-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.286781 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.286819 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.286830 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.286844 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.286856 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:46Z","lastTransitionTime":"2026-01-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.396897 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.396959 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.397071 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.397159 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.397185 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:46Z","lastTransitionTime":"2026-01-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.500113 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.500170 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.500184 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.500207 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.500222 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:46Z","lastTransitionTime":"2026-01-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.603955 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.603996 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.604006 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.604021 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.604032 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:46Z","lastTransitionTime":"2026-01-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.643276 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xmv9h_3f11e547-11fd-417a-be4a-e4f37d8e7839/kube-multus/0.log"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.643344 4885 generic.go:334] "Generic (PLEG): container finished" podID="3f11e547-11fd-417a-be4a-e4f37d8e7839" containerID="f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758" exitCode=1
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.643379 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xmv9h" event={"ID":"3f11e547-11fd-417a-be4a-e4f37d8e7839","Type":"ContainerDied","Data":"f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758"}
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.643856 4885 scope.go:117] "RemoveContainer" containerID="f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:46Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.677191 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:46Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.691828 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:46Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.707713 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.707826 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.707889 4885 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.707920 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.707943 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:46Z","lastTransitionTime":"2026-01-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.707847 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d
0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Runnin
g\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:46Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.720700 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.720700 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:46Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.740083 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76b3c14d-318a-4269-a669-be91a3a30425\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75325616731f47d3b1ac4f319f5190066c26fd2b2a2b20360bf25bfba831ff21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0896487baf92f89c8128ccac2ecc8ababb15c58c7cfe31432de7fe9095236e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://87b921c6d7b3ea99ae7c08fdc3e0a2b5ae0ec300c5ca5f0be5fb8de83d620537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:46Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.758395 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:46Z is after 
2025-08-24T17:21:41Z" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.774073 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:46Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.792212 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hg2nk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313f7566-bae9-4b9c-8c30-9e3c7aef8364\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hg2nk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:46Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.810879 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.810915 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.810924 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.810944 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.810958 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:46Z","lastTransitionTime":"2026-01-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.811834 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b2387071035bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:46Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.825516 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:46Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.841120 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:46Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.858048 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20bcaeae22fbe2c1ed3aae71f5a6ef7ed50b90d1485e6a793bafd4e72ab0453\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e676f6c9a7795d8f3a7c8ec94db7201104c58cb83d213dc3f5bdcd47ca803b4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pdvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:46Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.881987 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:46Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.902400 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:46Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.913467 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.913499 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.913511 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.913531 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.913544 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:46Z","lastTransitionTime":"2026-01-30T00:09:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.920380 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:46Z\\\",\\\"message\\\":\\\"2026-01-30T00:09:01+00:00 [cnibincopy] Successfully copied files in 
/usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ea062cd3-7160-4bb2-b1e2-4ca523b0f07b\\\\n2026-01-30T00:09:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ea062cd3-7160-4bb2-b1e2-4ca523b0f07b to /host/opt/cni/bin/\\\\n2026-01-30T00:09:01Z [verbose] multus-daemon started\\\\n2026-01-30T00:09:01Z [verbose] Readiness Indicator file check\\\\n2026-01-30T00:09:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:46Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.945969 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cf4b54e0f083169f45556dc42e2c9d37585f261b5d349b206bfd0b206a78988\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cf4b54e0f083169f45556dc42e2c9d37585f261b5d349b206bfd0b206a78988\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:28Z\\\",\\\"message\\\":\\\" for *v1.Pod openshift-machine-config-operator/machine-config-daemon-bmd5j after 0 failed attempt(s)\\\\nI0130 00:09:28.122154 6511 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-bmd5j\\\\nI0130 00:09:28.122141 6511 lb_config.go:1031] Cluster endpoints for openshift-marketplace/redhat-operators for network=default are: map[]\\\\nI0130 00:09:28.121751 6511 services_controller.go:434] Service openshift-machine-config-operator/machine-config-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-config-operator openshift-machine-config-operator 8bc1afc2-8724-4135-84df-aee09f23af4c 4514 0 2025-02-23 05:12:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-operator] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mco-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00796b2fb \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Na\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:46Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:46 crc kubenswrapper[4885]: I0130 00:09:46.969359 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:46Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.016726 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.016788 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.016801 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.016822 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.016835 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:47Z","lastTransitionTime":"2026-01-30T00:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.119966 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 00:31:50.155303779 +0000 UTC
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.120175 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.120207 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.120220 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.120236 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.120248 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:47Z","lastTransitionTime":"2026-01-30T00:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.222611 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.222667 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.222680 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.222729 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.222742 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:47Z","lastTransitionTime":"2026-01-30T00:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
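The certificate_manager.go line above concerns a different certificate: the kubelet's own serving cert, valid until 2026-02-24, with a rotation deadline of 2026-01-18 that has already passed, so the manager will attempt rotation straight away. The deadline is not the expiry itself; the upstream client-go certificate manager picks a jittered point roughly 70-90% of the way through the validity window so a fleet of kubelets does not rotate simultaneously. A minimal sketch of that computation, assuming a one-year lifetime (the notBefore is not shown in the log) and modeling the jitter range on the upstream manager rather than quoting it:

// rotation.go - sketch of a jittered rotation deadline like the one reported
// by certificate_manager.go. The 70-90% window and the 1-year lifetime are
// assumptions for illustration; exact jitter math varies between releases.
package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a random point in the last 10-30% of the cert's
// validity so that kubelets rotate at staggered times.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	// Expiry taken from the log line; lifetime assumed to be one year.
	notAfter, _ := time.Parse("2006-01-02 15:04:05", "2026-02-24 05:53:03")
	notBefore := notAfter.Add(-365 * 24 * time.Hour)
	deadline := rotationDeadline(notBefore, notAfter)
	fmt.Println("rotation deadline:", deadline.UTC())
	if time.Now().After(deadline) {
		fmt.Println("deadline passed; the manager would begin rotation now")
	}
}

Under those assumptions a draw near the top of the jitter range lands about 37 days before expiry, which is consistent with the logged deadline of 2026-01-18 against a 2026-02-24 expiration.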
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.324896 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.324934 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.324943 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.324959 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.324970 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:47Z","lastTransitionTime":"2026-01-30T00:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.428016 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.428102 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.428117 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.428164 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.428179 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:47Z","lastTransitionTime":"2026-01-30T00:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.531329 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.531399 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.531409 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.531432 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.531443 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:47Z","lastTransitionTime":"2026-01-30T00:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
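The recurring NodeNotReady condition comes from the runtime finding no CNI network configuration: nothing has written a file into /etc/kubernetes/cni/net.d/ yet, because ovn-kubernetes is still coming up. CNI config loaders conventionally pick up *.conf, *.conflist and *.json files from that directory; below is a minimal Go sketch of the same existence check (a hypothetical standalone tool modeled on that convention, not kubelet code):

// cnicheck.go - minimal sketch: look for CNI network configs in the directory
// named by the NetworkReady message. An empty result reproduces the
// "no CNI configuration file" condition seen above until the network
// provider writes its config.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // directory from the kubelet message
	var configs []string
	for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(dir, pat))
		if err != nil {
			fmt.Fprintln(os.Stderr, err)
			os.Exit(1)
		}
		configs = append(configs, matches...)
	}
	if len(configs) == 0 {
		fmt.Println("no CNI configuration file found; node will stay NotReady")
		return
	}
	for _, c := range configs {
		fmt.Println("found:", c)
	}
}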
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.634913 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.635003 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.635034 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.635069 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.635094 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:47Z","lastTransitionTime":"2026-01-30T00:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.651010 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xmv9h_3f11e547-11fd-417a-be4a-e4f37d8e7839/kube-multus/0.log"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.651101 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xmv9h" event={"ID":"3f11e547-11fd-417a-be4a-e4f37d8e7839","Type":"ContainerStarted","Data":"d609ef0f63a56e8d8c04c39f0d20715f8c0e42915ccf9a7fdfd1bec2a35dadc9"}
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.667791 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76b3c14d-318a-4269-a669-be91a3a30425\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75325616731f47d3b1ac4f319f5190066c26fd2b2a2b20360bf25bfba831ff21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0896487baf92f89c8128ccac2ecc8ababb15c58c7cfe31432de7fe9095236e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://87b921c6d7b3ea99ae7c08fdc3e0a2b5ae0ec300c5ca5f0be5fb8de83d620537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:47Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.683515 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:47Z is after 
2025-08-24T17:21:41Z" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.699982 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:47Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.716515 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:47Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.741790 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b2387071035bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:47Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.742179 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.742221 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.742243 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.742270 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.742291 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:47Z","lastTransitionTime":"2026-01-30T00:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.758925 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:47Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.772475 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:47Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.784247 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20bcaeae22fbe2c1ed3aae71f5a6ef7ed50b90d1485e6a793bafd4e72ab0453\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e676f6c9a7795d8f3a7c8ec94db7201104c58cb83d213dc3f5bdcd47ca803b4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pdvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:47Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.796583 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hg2nk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313f7566-bae9-4b9c-8c30-9e3c7aef8364\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hg2nk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-30T00:09:47Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.809623 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:47Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.828437 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d609ef0f63a56e8d8c04c39f0d20715f8c0e42915ccf9a7fdfd1bec2a35dadc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:46Z\\\",\\\"message\\\":\\\"2026-01-30T00:09:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ea062cd3-7160-4bb2-b1e2-4ca523b0f07b\\\\n2026-01-30T00:09:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ea062cd3-7160-4bb2-b1e2-4ca523b0f07b to /host/opt/cni/bin/\\\\n2026-01-30T00:09:01Z [verbose] multus-daemon started\\\\n2026-01-30T00:09:01Z [verbose] Readiness Indicator file check\\\\n2026-01-30T00:09:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:47Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.846178 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.846241 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.846256 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.846279 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.846296 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:47Z","lastTransitionTime":"2026-01-30T00:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
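The kube-multus termination message above shows the other half of the stall: multus started at 00:09:01, then polled for its readiness indicator file, /host/run/multus/cni/net.d/10-ovn-kubernetes.conf, until it gave up at 00:09:46 with the "pollimmediate error: timed out waiting for the condition" failure, because ovn-kubernetes had not yet written its config. A minimal plain-Go sketch of that poll-until-timeout pattern follows; the one-second interval and the ~45s timeout are inferred from the log timestamps, not taken from multus source:

// readiness.go - sketch of the check-first polling loop implied by the
// "pollimmediate" wording: stat the readiness indicator file immediately,
// then on every interval, until it appears or the deadline passes.
package main

import (
	"errors"
	"fmt"
	"os"
	"time"
)

func waitForFile(path string, interval, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for {
		if _, err := os.Stat(path); err == nil {
			return nil // indicator file exists; default network is ready
		}
		if time.Now().After(deadline) {
			return errors.New("timed out waiting for the condition")
		}
		time.Sleep(interval)
	}
}

func main() {
	path := "/host/run/multus/cni/net.d/10-ovn-kubernetes.conf"
	if err := waitForFile(path, time.Second, 45*time.Second); err != nil {
		fmt.Println("have you checked that your default network is ready?", err)
		os.Exit(1)
	}
	fmt.Println("readiness indicator present:", path)
}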
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.860705 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cf4b54e0f083169f45556dc42e2c9d37585f261b5d349b206bfd0b206a78988\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cf4b54e0f083169f45556dc42e2c9d37585f261b5d349b206bfd0b206a78988\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:28Z\\\",\\\"message\\\":\\\" for *v1.Pod openshift-machine-config-operator/machine-config-daemon-bmd5j after 0 failed attempt(s)\\\\nI0130 00:09:28.122154 6511 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-bmd5j\\\\nI0130 00:09:28.122141 6511 lb_config.go:1031] Cluster endpoints for openshift-marketplace/redhat-operators for network=default are: map[]\\\\nI0130 00:09:28.121751 6511 services_controller.go:434] Service openshift-machine-config-operator/machine-config-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-config-operator openshift-machine-config-operator 8bc1afc2-8724-4135-84df-aee09f23af4c 4514 0 2025-02-23 05:12:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-operator] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mco-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00796b2fb \\\\u003cnil\\\\u003e}] [] 
[]},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Na\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-
access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:47Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.880511 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:47Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.898966 4885 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:47Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.915420 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:47Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.934649 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:47Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.949354 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.949427 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.949446 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.949476 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.949494 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:47Z","lastTransitionTime":"2026-01-30T00:09:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.956954 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:47Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:47 crc kubenswrapper[4885]: I0130 00:09:47.974656 4885 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\
\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:47Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.052005 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.052055 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.052065 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.052079 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.052089 4885 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:48Z","lastTransitionTime":"2026-01-30T00:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.120786 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 12:11:54.963658392 +0000 UTC Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.141203 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.141259 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.141208 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:48 crc kubenswrapper[4885]: E0130 00:09:48.141366 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.141390 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:48 crc kubenswrapper[4885]: E0130 00:09:48.141530 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:09:48 crc kubenswrapper[4885]: E0130 00:09:48.141704 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:09:48 crc kubenswrapper[4885]: E0130 00:09:48.141854 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.155705 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.155791 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.155806 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.155827 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.155846 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:48Z","lastTransitionTime":"2026-01-30T00:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.258737 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.258911 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.258926 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.258945 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.258958 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:48Z","lastTransitionTime":"2026-01-30T00:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.361447 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.361515 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.361532 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.361557 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.361576 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:48Z","lastTransitionTime":"2026-01-30T00:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.464368 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.464438 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.464456 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.464485 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.464523 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:48Z","lastTransitionTime":"2026-01-30T00:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.567214 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.567319 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.567338 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.567364 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.567381 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:48Z","lastTransitionTime":"2026-01-30T00:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.669957 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.669997 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.670008 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.670023 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.670034 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:48Z","lastTransitionTime":"2026-01-30T00:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.772817 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.772861 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.772872 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.772888 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.772903 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:48Z","lastTransitionTime":"2026-01-30T00:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.875409 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.875470 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.875480 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.875497 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.875509 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:48Z","lastTransitionTime":"2026-01-30T00:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.979227 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.979283 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.979300 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.979323 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:48 crc kubenswrapper[4885]: I0130 00:09:48.979339 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:48Z","lastTransitionTime":"2026-01-30T00:09:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.081937 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.081993 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.082010 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.082035 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.082055 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:49Z","lastTransitionTime":"2026-01-30T00:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.121863 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 22:04:19.268550811 +0000 UTC Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.186340 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.186414 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.186438 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.186469 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.186493 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:49Z","lastTransitionTime":"2026-01-30T00:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.290446 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.290515 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.290532 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.290556 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.290574 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:49Z","lastTransitionTime":"2026-01-30T00:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.393550 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.393601 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.393612 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.393630 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.393645 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:49Z","lastTransitionTime":"2026-01-30T00:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.497345 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.497406 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.497422 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.497453 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.497467 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:49Z","lastTransitionTime":"2026-01-30T00:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.601862 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.601958 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.601982 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.602007 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.602022 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:49Z","lastTransitionTime":"2026-01-30T00:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.705620 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.705701 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.705726 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.705761 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.705836 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:49Z","lastTransitionTime":"2026-01-30T00:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.809421 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.809493 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.809509 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.809537 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.809556 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:49Z","lastTransitionTime":"2026-01-30T00:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.913429 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.913633 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.913673 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.913708 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:49 crc kubenswrapper[4885]: I0130 00:09:49.913728 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:49Z","lastTransitionTime":"2026-01-30T00:09:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.017252 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.017307 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.017323 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.017342 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.017354 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:50Z","lastTransitionTime":"2026-01-30T00:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.120894 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.120958 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.120971 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.120993 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.121007 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:50Z","lastTransitionTime":"2026-01-30T00:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.123034 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 11:34:39.354864178 +0000 UTC Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.140974 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.141117 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.140984 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:09:50 crc kubenswrapper[4885]: E0130 00:09:50.141185 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:09:50 crc kubenswrapper[4885]: E0130 00:09:50.141346 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.141425 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:50 crc kubenswrapper[4885]: E0130 00:09:50.141551 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:09:50 crc kubenswrapper[4885]: E0130 00:09:50.141635 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.224460 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.224538 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.224557 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.224584 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.224606 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:50Z","lastTransitionTime":"2026-01-30T00:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.327963 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.328008 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.328019 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.328035 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.328074 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:50Z","lastTransitionTime":"2026-01-30T00:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.431570 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.431644 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.431656 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.431677 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.431690 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:50Z","lastTransitionTime":"2026-01-30T00:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.535662 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.535725 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.535742 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.535763 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.535809 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:50Z","lastTransitionTime":"2026-01-30T00:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.638961 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.639037 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.639050 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.639074 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.639089 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:50Z","lastTransitionTime":"2026-01-30T00:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.742605 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.742666 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.742683 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.742708 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.742724 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:50Z","lastTransitionTime":"2026-01-30T00:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.846261 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.846329 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.846343 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.846360 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.846373 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:50Z","lastTransitionTime":"2026-01-30T00:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.949948 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.950000 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.950009 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.950030 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:50 crc kubenswrapper[4885]: I0130 00:09:50.950041 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:50Z","lastTransitionTime":"2026-01-30T00:09:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.054102 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.054184 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.054208 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.054241 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.054261 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:51Z","lastTransitionTime":"2026-01-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.123506 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 02:26:44.473103437 +0000 UTC Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.156201 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.156241 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.156252 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.156270 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.156302 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:51Z","lastTransitionTime":"2026-01-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.259155 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.259250 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.259267 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.259513 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.259615 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:51Z","lastTransitionTime":"2026-01-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.363463 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.363564 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.363589 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.363620 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.363643 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:51Z","lastTransitionTime":"2026-01-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.438217 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.438262 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.438274 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.438293 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.438306 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:51Z","lastTransitionTime":"2026-01-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:51 crc kubenswrapper[4885]: E0130 00:09:51.461544 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:51Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.467355 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.467438 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
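Editor's note: the status patch above is structurally fine; it is rejected before admission because the node.network-node-identity.openshift.io webhook on 127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24, five months before this log's clock of 2026-01-30. That is the usual symptom of resuming a CRC VM long after its embedded certificates have lapsed. A small sketch of the x509 validity test the TLS handshake is failing; the certificate path is hypothetical:

    // certcheck.go - reproduces the validity test that fails in the
    // webhook call above. The path is hypothetical; on the node the
    // certificate is served by the network-node-identity webhook on :9743.
    package main

    import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "os"
        "time"
    )

    func main() {
        data, err := os.ReadFile("/path/to/webhook-serving.crt") // hypothetical
        if err != nil {
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }
        block, _ := pem.Decode(data)
        if block == nil {
            fmt.Fprintln(os.Stderr, "no PEM block found")
            os.Exit(1)
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }
        now := time.Now()
        if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
            // Same condition the handshake reports:
            // "certificate has expired or is not yet valid".
            fmt.Printf("certificate has expired or is not yet valid: current time %s is after %s\n",
                now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
            os.Exit(1)
        }
        fmt.Println("certificate valid until", cert.NotAfter)
    }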
event="NodeHasNoDiskPressure" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.467457 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.467482 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.467502 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:51Z","lastTransitionTime":"2026-01-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:51 crc kubenswrapper[4885]: E0130 00:09:51.489433 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:09:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:51Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.494757 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.494830 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
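Editor's note: each E0130 "Error updating node status, will retry" record in this run is one attempt in the kubelet's bounded status-update loop; when the attempts are exhausted it logs a final failure for this sync period and tries again on the next one. The shape of that loop, assuming the nodeStatusUpdateRetry = 5 constant from the kubelet source (worth verifying against this exact release):

    // retry.go - shape of the bounded retry around the repeated
    // "will retry" records above; the retry count is an assumption.
    package main

    import (
        "errors"
        "fmt"
    )

    const nodeStatusUpdateRetry = 5 // assumed kubelet constant

    func tryUpdateNodeStatus() error {
        // Stand-in for the PATCH that the expired webhook keeps rejecting.
        return errors.New("failed calling webhook: certificate has expired")
    }

    func main() {
        for i := 0; i < nodeStatusUpdateRetry; i++ {
            if err := tryUpdateNodeStatus(); err != nil {
                fmt.Printf("Error updating node status, will retry: %v\n", err)
                continue
            }
            return
        }
        fmt.Println("Unable to update node status: update node status exceeds retry count")
    }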
event="NodeHasNoDiskPressure"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.494843 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.494863 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.494875 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:51Z","lastTransitionTime":"2026-01-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:51 crc kubenswrapper[4885]: E0130 00:09:51.509820 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [status patch payload elided; identical to the previous attempt] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:51Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.514847 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.514917 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.514939 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.514965 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.514983 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:51Z","lastTransitionTime":"2026-01-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:51 crc kubenswrapper[4885]: E0130 00:09:51.530518 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [status patch payload elided; identical to the previous attempt] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:51Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.540206 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.540290 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.540320 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.540395 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.540492 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:51Z","lastTransitionTime":"2026-01-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:51 crc kubenswrapper[4885]: E0130 00:09:51.558604 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [status patch payload elided; identical to the previous attempt] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:51Z is after 2025-08-24T17:21:41Z"
Jan 30 00:09:51 crc kubenswrapper[4885]: E0130 00:09:51.558854 4885 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.561634 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.561678 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.561692 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.561733 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.561752 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:51Z","lastTransitionTime":"2026-01-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.669442 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.669530 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.669551 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.669581 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.669602 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:51Z","lastTransitionTime":"2026-01-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.772820 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.772859 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.772876 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.772899 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.772915 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:51Z","lastTransitionTime":"2026-01-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.876516 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.876590 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.876609 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.876637 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.876655 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:51Z","lastTransitionTime":"2026-01-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.980852 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.980928 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.980942 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.980967 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:51 crc kubenswrapper[4885]: I0130 00:09:51.980981 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:51Z","lastTransitionTime":"2026-01-30T00:09:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.085034 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.085098 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.085110 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.085137 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.085152 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:52Z","lastTransitionTime":"2026-01-30T00:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.123839 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 10:32:55.001066471 +0000 UTC
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.141704 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.141778 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.141933 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk"
Jan 30 00:09:52 crc kubenswrapper[4885]: E0130 00:09:52.142153 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.142448 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 00:09:52 crc kubenswrapper[4885]: E0130 00:09:52.142562 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 00:09:52 crc kubenswrapper[4885]: E0130 00:09:52.142795 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364"
Jan 30 00:09:52 crc kubenswrapper[4885]: E0130 00:09:52.142992 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.252858 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.252937 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.252950 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.252975 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.252992 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:52Z","lastTransitionTime":"2026-01-30T00:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.357246 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.357313 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.357330 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.357364 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.357384 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:52Z","lastTransitionTime":"2026-01-30T00:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.461103 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.461160 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.461172 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.461196 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.461207 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:52Z","lastTransitionTime":"2026-01-30T00:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.563830 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.563897 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.563908 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.563931 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.563944 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:52Z","lastTransitionTime":"2026-01-30T00:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.667125 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.667225 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.667266 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.667303 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.667326 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:52Z","lastTransitionTime":"2026-01-30T00:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.770271 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.770347 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.770365 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.770389 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.770405 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:52Z","lastTransitionTime":"2026-01-30T00:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.873023 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.873069 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.873085 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.873109 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.873126 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:52Z","lastTransitionTime":"2026-01-30T00:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.977106 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.977216 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.977240 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.977272 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:09:52 crc kubenswrapper[4885]: I0130 00:09:52.977297 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:52Z","lastTransitionTime":"2026-01-30T00:09:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Jan 30 00:09:53 crc kubenswrapper[4885]: I0130 00:09:53.080458 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:53 crc kubenswrapper[4885]: I0130 00:09:53.080513 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:53 crc kubenswrapper[4885]: I0130 00:09:53.080524 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:53 crc kubenswrapper[4885]: I0130 00:09:53.080547 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:53 crc kubenswrapper[4885]: I0130 00:09:53.081390 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:53Z","lastTransitionTime":"2026-01-30T00:09:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:53 crc kubenswrapper[4885]: I0130 00:09:53.124463 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 17:44:14.487681152 +0000 UTC Jan 30 00:09:53 crc kubenswrapper[4885]: I0130 00:09:53.185316 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:53 crc kubenswrapper[4885]: I0130 00:09:53.185404 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:53 crc kubenswrapper[4885]: I0130 00:09:53.185429 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:53 crc kubenswrapper[4885]: I0130 00:09:53.185463 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:53 crc kubenswrapper[4885]: I0130 00:09:53.185490 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:53Z","lastTransitionTime":"2026-01-30T00:09:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
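The certificate_manager line above (and its follow-up at 00:09:54.124980 below) is the kubelet-serving rotation bookkeeping: the deadline is re-randomized on each pass and already lies behind the node's clock, so rotation is due immediately. As a rough sketch of the jittered-deadline idea in Go — the 70%–90% window is an assumption for illustration, not an authoritative constant from client-go:

    package main

    import (
    	"fmt"
    	"math/rand"
    	"time"
    )

    // rotationDeadline picks a random point inside the certificate's
    // validity window, in the spirit of client-go's certificate manager.
    // The [0.7, 0.9) fraction of the lifetime is an assumed range.
    func rotationDeadline(notBefore, notAfter time.Time) time.Time {
    	total := notAfter.Sub(notBefore)
    	frac := 0.7 + 0.2*rand.Float64()
    	return notBefore.Add(time.Duration(float64(total) * frac))
    }

    func main() {
    	// notBefore is a hypothetical issue time; notAfter matches the logged expiration.
    	notBefore := time.Date(2025, time.November, 26, 5, 53, 3, 0, time.UTC)
    	notAfter := time.Date(2026, time.February, 24, 5, 53, 3, 0, time.UTC)
    	fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter))
    }

With deadlines landing on 2025-12-31 and then 2026-01-15, both already past at log time 2026-01-30, the manager will attempt rotation on its next loop, which is consistent with the deadline changing between the two log lines.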
[the block recurs again at 00:09:53.185316, .288493, .391707, .497618, .603765, .707003, .809510, .913210 and at 00:09:54.016207, .119635, each pass ending with the same "Node became not ready" condition]
Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.124980 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 15:16:08.828862208 +0000 UTC
Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.141044 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.141173 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.141173 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk"
Jan 30 00:09:54 crc kubenswrapper[4885]: E0130 00:09:54.141283 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 00:09:54 crc kubenswrapper[4885]: E0130 00:09:54.141408 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.141424 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 00:09:54 crc kubenswrapper[4885]: E0130 00:09:54.141510 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364"
Jan 30 00:09:54 crc kubenswrapper[4885]: E0130 00:09:54.141619 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
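Everything above is one failure: the CRI runtime reports NetworkReady=false because nothing has yet written a CNI config into /etc/kubernetes/cni/net.d/ (on this cluster that is ovn-kubernetes' job once its pods come up), so the node stays NotReady and pods needing pod networking cannot get sandboxes. For orientation only, a minimal *.conflist of the shape the runtime is polling for might look like the sketch below; the network name and the loopback plugin are illustrative stand-ins, not the file OVN-Kubernetes actually installs:

    {
      "cniVersion": "0.4.0",
      "name": "example-net",
      "plugins": [
        { "type": "loopback" }
      ]
    }

Once a valid config appears in that directory, the runtime should flip NetworkReady to true and the NodeNotReady churn above stops.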
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.163205 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:54Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.179954 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:54Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.197525 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76b3c14d-318a-4269-a669-be91a3a30425\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75325616731f47d3b1ac4f319f5190066c26fd2b2a2b20360bf25bfba831ff21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0896487baf92f89c8128ccac2ecc8ababb15c58c7cfe31432de7fe9095236e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://87b921c6d7b3ea99ae7c08fdc3e0a2b5ae0ec300c5ca5f0be5fb8de83d620537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:54Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.215656 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:54Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.223141 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.223195 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.223214 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.223243 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.223259 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:54Z","lastTransitionTime":"2026-01-30T00:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.229898 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:54Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.249273 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20bcaeae22fbe2c1ed3aae71f5a6ef7ed50b90d1485e6a793bafd4e72ab0453\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e676f6c9a7795d8f3a7c8ec94db7201104c58cb83d213dc3f5bdcd47ca803b4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pdvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:54Z is after 2025-08-24T17:21:41Z" Jan 30 
00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.260942 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hg2nk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313f7566-bae9-4b9c-8c30-9e3c7aef8364\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hg2nk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:54Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.282056 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b238707103
5bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:54Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.296350 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:54Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.319392 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cf4b54e0f083169f45556dc42e2c9d37585f261
b5d349b206bfd0b206a78988\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cf4b54e0f083169f45556dc42e2c9d37585f261b5d349b206bfd0b206a78988\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:28Z\\\",\\\"message\\\":\\\" for *v1.Pod openshift-machine-config-operator/machine-config-daemon-bmd5j after 0 failed attempt(s)\\\\nI0130 00:09:28.122154 6511 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-bmd5j\\\\nI0130 00:09:28.122141 6511 lb_config.go:1031] Cluster endpoints for openshift-marketplace/redhat-operators for network=default are: map[]\\\\nI0130 00:09:28.121751 6511 services_controller.go:434] Service openshift-machine-config-operator/machine-config-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-config-operator openshift-machine-config-operator 8bc1afc2-8724-4135-84df-aee09f23af4c 4514 0 2025-02-23 05:12:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-operator] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mco-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00796b2fb \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Na\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:54Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.327847 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.327915 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.327934 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.327963 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.327982 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:54Z","lastTransitionTime":"2026-01-30T00:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.334110 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:54Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.349356 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:54Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.365076 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:54Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.382126 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d609ef0f63a56e8d8c04c39f0d20715f8c0e42915ccf9a7fdfd1bec2a35dadc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:46Z\\\",\\\"message\\\":\\\"2026-01-30T00:09:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ea062cd3-7160-4bb2-b1e2-4ca523b0f07b\\\\n2026-01-30T00:09:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ea062cd3-7160-4bb2-b1e2-4ca523b0f07b to /host/opt/cni/bin/\\\\n2026-01-30T00:09:01Z [verbose] multus-daemon started\\\\n2026-01-30T00:09:01Z [verbose] Readiness Indicator file check\\\\n2026-01-30T00:09:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:54Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.398463 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:54Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.414839 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:54Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.429137 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:54Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.430343 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.430383 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.430392 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.430429 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.430442 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:54Z","lastTransitionTime":"2026-01-30T00:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.445361 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:54Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.534222 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 
00:09:54.534275 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.534285 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.534306 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.534321 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:54Z","lastTransitionTime":"2026-01-30T00:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.636181 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.636246 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.636264 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.636290 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.636312 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:54Z","lastTransitionTime":"2026-01-30T00:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.739862 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.739944 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.739963 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.739986 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.740005 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:54Z","lastTransitionTime":"2026-01-30T00:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.843547 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.843607 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.843625 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.843652 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.843673 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:54Z","lastTransitionTime":"2026-01-30T00:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.947852 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.947908 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.947925 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.947953 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:54 crc kubenswrapper[4885]: I0130 00:09:54.947972 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:54Z","lastTransitionTime":"2026-01-30T00:09:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.050628 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.050700 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.050719 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.050749 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.050800 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:55Z","lastTransitionTime":"2026-01-30T00:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.126072 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 08:13:52.289062061 +0000 UTC Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.153970 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.154019 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.154029 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.154048 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.153979 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.154059 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:55Z","lastTransitionTime":"2026-01-30T00:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.257293 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.257356 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.257370 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.257393 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.257410 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:55Z","lastTransitionTime":"2026-01-30T00:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.360555 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.360626 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.360648 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.360681 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.360701 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:55Z","lastTransitionTime":"2026-01-30T00:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.463655 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.463716 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.463728 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.463755 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.463799 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:55Z","lastTransitionTime":"2026-01-30T00:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.566737 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.566815 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.566825 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.566845 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.566855 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:55Z","lastTransitionTime":"2026-01-30T00:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.670602 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.670667 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.670680 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.670704 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.670722 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:55Z","lastTransitionTime":"2026-01-30T00:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.774252 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.774332 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.774352 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.774417 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.774436 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:55Z","lastTransitionTime":"2026-01-30T00:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.876954 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.877034 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.877048 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.877067 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.877080 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:55Z","lastTransitionTime":"2026-01-30T00:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.980470 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.980548 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.980567 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.980618 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:55 crc kubenswrapper[4885]: I0130 00:09:55.980636 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:55Z","lastTransitionTime":"2026-01-30T00:09:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.083486 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.083566 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.083580 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.083600 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.083613 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:56Z","lastTransitionTime":"2026-01-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.127113 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 04:46:10.287593888 +0000 UTC Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.141602 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.141591 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.141806 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:56 crc kubenswrapper[4885]: E0130 00:09:56.142106 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.142177 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:09:56 crc kubenswrapper[4885]: E0130 00:09:56.142380 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:09:56 crc kubenswrapper[4885]: E0130 00:09:56.142668 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:09:56 crc kubenswrapper[4885]: E0130 00:09:56.142705 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.187032 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.187114 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.187131 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.187161 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.187179 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:56Z","lastTransitionTime":"2026-01-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.289932 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.289991 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.290004 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.290026 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.290039 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:56Z","lastTransitionTime":"2026-01-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.393594 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.393655 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.393667 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.393695 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.393715 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:56Z","lastTransitionTime":"2026-01-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.497040 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.497111 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.497135 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.497167 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.497192 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:56Z","lastTransitionTime":"2026-01-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.600646 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.600722 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.600735 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.600757 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.600803 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:56Z","lastTransitionTime":"2026-01-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.704063 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.704125 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.704138 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.704161 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.704175 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:56Z","lastTransitionTime":"2026-01-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.807193 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.807305 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.807334 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.807367 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.807385 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:56Z","lastTransitionTime":"2026-01-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.910608 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.910663 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.910674 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.910691 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:56 crc kubenswrapper[4885]: I0130 00:09:56.910703 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:56Z","lastTransitionTime":"2026-01-30T00:09:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.014839 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.014956 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.014974 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.015004 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.015021 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:57Z","lastTransitionTime":"2026-01-30T00:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.118137 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.118213 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.118249 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.118308 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.118334 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:57Z","lastTransitionTime":"2026-01-30T00:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.127979 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 06:04:32.049368371 +0000 UTC Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.221637 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.221689 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.221702 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.221719 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.221733 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:57Z","lastTransitionTime":"2026-01-30T00:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.325606 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.325676 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.325695 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.325728 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.325763 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:57Z","lastTransitionTime":"2026-01-30T00:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.429847 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.429892 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.429904 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.429928 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.429944 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:57Z","lastTransitionTime":"2026-01-30T00:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.532619 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.532693 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.532706 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.532729 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.532746 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:57Z","lastTransitionTime":"2026-01-30T00:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.636533 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.636590 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.636608 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.636637 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.636656 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:57Z","lastTransitionTime":"2026-01-30T00:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.738980 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.739351 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.739440 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.739534 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.739637 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:57Z","lastTransitionTime":"2026-01-30T00:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.786463 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.786544 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:09:57 crc kubenswrapper[4885]: E0130 00:09:57.786760 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 00:09:57 crc kubenswrapper[4885]: E0130 00:09:57.786824 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 00:09:57 crc kubenswrapper[4885]: E0130 00:09:57.786840 4885 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 30 00:09:57 crc kubenswrapper[4885]: E0130 00:09:57.786877 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 30 00:09:57 crc kubenswrapper[4885]: E0130 00:09:57.786913 4885 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 30 00:09:57 crc kubenswrapper[4885]: E0130 00:09:57.786941 4885 projected.go:194] Error preparing data for projected volume 
Jan 30 00:09:57 crc kubenswrapper[4885]: E0130 00:09:57.786915 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.786892109 +0000 UTC m=+148.378363857 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 30 00:09:57 crc kubenswrapper[4885]: E0130 00:09:57.787054 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.787029362 +0000 UTC m=+148.378501150 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Has your network provider started?"} Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.887425 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:09:57 crc kubenswrapper[4885]: E0130 00:09:57.887848 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.88781659 +0000 UTC m=+148.479288378 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.945361 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.945659 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.945804 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.945930 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.946089 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:57Z","lastTransitionTime":"2026-01-30T00:09:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.989345 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:57 crc kubenswrapper[4885]: I0130 00:09:57.989687 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:09:57 crc kubenswrapper[4885]: E0130 00:09:57.989545 4885 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 00:09:57 crc kubenswrapper[4885]: E0130 00:09:57.990041 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.990016136 +0000 UTC m=+148.581487894 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 30 00:09:57 crc kubenswrapper[4885]: E0130 00:09:57.989837 4885 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 30 00:09:57 crc kubenswrapper[4885]: E0130 00:09:57.990264 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.990249042 +0000 UTC m=+148.581720800 (durationBeforeRetry 1m4s). 
Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.128942 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 05:14:04.228137456 +0000 UTC
Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.141745 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.141837 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk"
Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.141901 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 00:09:58 crc kubenswrapper[4885]: E0130 00:09:58.142096 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.142225 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 00:09:58 crc kubenswrapper[4885]: E0130 00:09:58.142407 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364"
pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:09:58 crc kubenswrapper[4885]: E0130 00:09:58.143044 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:09:58 crc kubenswrapper[4885]: E0130 00:09:58.143151 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.143654 4885 scope.go:117] "RemoveContainer" containerID="0cf4b54e0f083169f45556dc42e2c9d37585f261b5d349b206bfd0b206a78988" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.153130 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.153177 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.153195 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.153222 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.153243 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:58Z","lastTransitionTime":"2026-01-30T00:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.256488 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.256564 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.256587 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.256620 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.256641 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:58Z","lastTransitionTime":"2026-01-30T00:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.360471 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.360548 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.360575 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.360612 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.360641 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:58Z","lastTransitionTime":"2026-01-30T00:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.464588 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.464668 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.464691 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.464720 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.464740 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:58Z","lastTransitionTime":"2026-01-30T00:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.568966 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.569041 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.569060 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.569090 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.569110 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:58Z","lastTransitionTime":"2026-01-30T00:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.672353 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.673088 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.673121 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.673150 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.673171 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:58Z","lastTransitionTime":"2026-01-30T00:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.776907 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.776979 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.777003 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.777040 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.777064 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:58Z","lastTransitionTime":"2026-01-30T00:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.880378 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.880460 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.880485 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.880519 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.880548 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:58Z","lastTransitionTime":"2026-01-30T00:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.983699 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.983749 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.983808 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.983834 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:58 crc kubenswrapper[4885]: I0130 00:09:58.983863 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:58Z","lastTransitionTime":"2026-01-30T00:09:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.086826 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.086890 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.086907 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.086934 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.086953 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:59Z","lastTransitionTime":"2026-01-30T00:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.129506 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 16:49:24.867956522 +0000 UTC Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.189859 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.189889 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.189898 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.189914 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.189923 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:59Z","lastTransitionTime":"2026-01-30T00:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.292526 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.292576 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.292592 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.292618 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.292636 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:59Z","lastTransitionTime":"2026-01-30T00:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.394650 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.394689 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.394700 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.394717 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.394729 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:59Z","lastTransitionTime":"2026-01-30T00:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.497798 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.497886 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.497911 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.497946 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.497971 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:59Z","lastTransitionTime":"2026-01-30T00:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.601326 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.601369 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.601381 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.601400 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.601412 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:59Z","lastTransitionTime":"2026-01-30T00:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.703860 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.703899 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.703912 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.703934 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.703947 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:59Z","lastTransitionTime":"2026-01-30T00:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.705165 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hwpvs_147e5e96-db98-498f-b4a4-927d73cb5db5/ovnkube-controller/2.log" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.709398 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerStarted","Data":"5780df1fee00b2c2d8653b44046f36214d66f32ce87ebeceea15fc88e8302dc9"} Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.709930 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.733362 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76b3c14d-318a-4269-a669-be91a3a30425\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75325616731f47d3b1ac4f319f5190066c26fd2b2a2b20360bf25bfba831ff21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0896487baf92f89c8128ccac2ecc8ababb15c58c7cfe31432de7fe9095236e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://87b921c6d7b3ea99ae7c08fdc3e0a2b5ae0ec300c5ca5f0be5fb8de83d620537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.749424 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:59Z is after 
2025-08-24T17:21:41Z" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.760120 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.772499 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.789638 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b2387071035bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.801467 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.806210 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.806246 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.806257 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.806276 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.806288 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:59Z","lastTransitionTime":"2026-01-30T00:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.812726 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.825799 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20bcaeae22fbe2c1ed3aae71f5a6ef7ed50b90d1485e6a793bafd4e72ab0453\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e676f6c9a7795d8f3a7c8ec94db7201104c58cb83d213dc3f5bdcd47ca803b4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pdvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:59Z is after 2025-08-24T17:21:41Z" Jan 30 
00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.836090 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hg2nk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313f7566-bae9-4b9c-8c30-9e3c7aef8364\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hg2nk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.849954 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5326ca3e-fe69-4a11-9359-5c1d583d8ea5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7509c2908aaef44bb980031051612087897581d2fab9d59af0198902e221704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://822e1d88a470745d8f5a094c2a9e352038fee6104ac7a3d61fb5f130d8b4abad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://822e1d88a470745d8f5a094c2a9e352038fee6104ac7a3d61fb5f130d8b4abad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.860737 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.877649 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d609ef0f63a56e8d8c04c39f0d20715f8c0e42915ccf9a7fdfd1bec2a35dadc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:46Z\\\",\\\"message\\\":\\\"2026-01-30T00:09:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ea062cd3-7160-4bb2-b1e2-4ca523b0f07b\\\\n2026-01-30T00:09:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ea062cd3-7160-4bb2-b1e2-4ca523b0f07b to /host/opt/cni/bin/\\\\n2026-01-30T00:09:01Z [verbose] multus-daemon started\\\\n2026-01-30T00:09:01Z [verbose] Readiness Indicator file check\\\\n2026-01-30T00:09:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.899268 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5780df1fee00b2c2d8653b44046f36214d66f32ce87ebeceea15fc88e8302dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cf4b54e0f083169f45556dc42e2c9d37585f261b5d349b206bfd0b206a78988\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:28Z\\\",\\\"message\\\":\\\" for *v1.Pod openshift-machine-config-operator/machine-config-daemon-bmd5j after 0 failed attempt(s)\\\\nI0130 00:09:28.122154 6511 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-bmd5j\\\\nI0130 00:09:28.122141 6511 lb_config.go:1031] Cluster endpoints for openshift-marketplace/redhat-operators for network=default are: map[]\\\\nI0130 00:09:28.121751 6511 services_controller.go:434] Service openshift-machine-config-operator/machine-config-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-config-operator openshift-machine-config-operator 8bc1afc2-8724-4135-84df-aee09f23af4c 4514 0 2025-02-23 05:12:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-operator] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mco-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00796b2fb \\\\u003cnil\\\\u003e}] [] 
[]},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Na\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168
.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.912823 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.912868 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.912878 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.912897 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.912907 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:09:59Z","lastTransitionTime":"2026-01-30T00:09:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.918884 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.936552 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.951758 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.965556 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.979393 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:09:59 crc kubenswrapper[4885]: I0130 00:09:59.994042 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:09:59Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.014814 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.014859 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.014869 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.014885 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.014894 4885 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:00Z","lastTransitionTime":"2026-01-30T00:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.118090 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.118454 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.118476 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.118504 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.118523 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:00Z","lastTransitionTime":"2026-01-30T00:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.129663 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 02:50:51.157501594 +0000 UTC Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.140982 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.141025 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.140992 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.141182 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:10:00 crc kubenswrapper[4885]: E0130 00:10:00.141175 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:10:00 crc kubenswrapper[4885]: E0130 00:10:00.141322 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:10:00 crc kubenswrapper[4885]: E0130 00:10:00.141399 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:10:00 crc kubenswrapper[4885]: E0130 00:10:00.141475 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.221388 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.221443 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.221457 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.221478 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.221492 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:00Z","lastTransitionTime":"2026-01-30T00:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.324357 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.324397 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.324408 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.324428 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.324441 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:00Z","lastTransitionTime":"2026-01-30T00:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.428081 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.428152 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.428171 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.428201 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.428221 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:00Z","lastTransitionTime":"2026-01-30T00:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.531516 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.531597 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.531621 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.531657 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.531698 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:00Z","lastTransitionTime":"2026-01-30T00:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.635102 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.635175 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.635198 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.635238 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.635265 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:00Z","lastTransitionTime":"2026-01-30T00:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.716045 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hwpvs_147e5e96-db98-498f-b4a4-927d73cb5db5/ovnkube-controller/3.log" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.717244 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hwpvs_147e5e96-db98-498f-b4a4-927d73cb5db5/ovnkube-controller/2.log" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.721509 4885 generic.go:334] "Generic (PLEG): container finished" podID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerID="5780df1fee00b2c2d8653b44046f36214d66f32ce87ebeceea15fc88e8302dc9" exitCode=1 Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.721581 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerDied","Data":"5780df1fee00b2c2d8653b44046f36214d66f32ce87ebeceea15fc88e8302dc9"} Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.721669 4885 scope.go:117] "RemoveContainer" containerID="0cf4b54e0f083169f45556dc42e2c9d37585f261b5d349b206bfd0b206a78988" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.722713 4885 scope.go:117] "RemoveContainer" containerID="5780df1fee00b2c2d8653b44046f36214d66f32ce87ebeceea15fc88e8302dc9" Jan 30 00:10:00 crc kubenswrapper[4885]: E0130 00:10:00.723056 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.739050 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.739102 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.739121 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.739149 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.739168 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:00Z","lastTransitionTime":"2026-01-30T00:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.745191 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.763257 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.783048 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.806084 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.830379 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:00Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.844034 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.844105 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.844127 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.844157 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.844187 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:00Z","lastTransitionTime":"2026-01-30T00:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.850307 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76b3c14d-318a-4269-a669-be91a3a30425\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75325616731f47d3b1ac4f319f5190066c26fd2b2a2b20360bf25bfba831ff21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0896487baf92f89c8128ccac2ecc8ababb15c58c7cfe31432de7fe9095236e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://87b921c6d7b3ea99ae7c08fdc3e0a2b5ae0ec300c5ca5f0be5fb8de83d620537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.872870 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.891375 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:00Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.908520 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20bcaeae22fbe2c1ed3aae71f5a6ef7ed50b90d1485e6a793bafd4e72ab0453\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e676f6c9a7795d8f3a7c8ec94db7201104c58cb83d213dc3f5bdcd47ca803b4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pdvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:00Z is after 2025-08-24T17:21:41Z" Jan 30 
Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.923003 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hg2nk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313f7566-bae9-4b9c-8c30-9e3c7aef8364\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hg2nk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:00Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.939103 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5326ca3e-fe69-4a11-9359-5c1d583d8ea5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7509c2908aaef44bb980031051612087897581d2fab9d59af0198902e221704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://822e1d88a470745d8f5a094c2a9e352038fee6104ac7a3d61fb5f130d8b4abad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://822e1d88a470745d8f5a094c2a9e352038fee6104ac7a3d61fb5f130d8b4abad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:00Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.946926 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.946977 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.946995 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.947025 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.947043 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:00Z","lastTransitionTime":"2026-01-30T00:10:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.973220 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b2387071035bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:00Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:00 crc kubenswrapper[4885]: I0130 00:10:00.989307 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:00Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.003819 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.019711 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.037123 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.051926 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.051986 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.052009 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.052045 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.052070 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:01Z","lastTransitionTime":"2026-01-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.053482 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.074581 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d609ef0f63a56e8d8c04c39f0d20715f8c0e42915ccf9a7fdfd1bec2a35dadc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:46Z\\\",\\\"message\\\":\\\"2026-01-30T00:09:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ea062cd3-7160-4bb2-b1e2-4ca523b0f07b\\\\n2026-01-30T00:09:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ea062cd3-7160-4bb2-b1e2-4ca523b0f07b to /host/opt/cni/bin/\\\\n2026-01-30T00:09:01Z [verbose] multus-daemon started\\\\n2026-01-30T00:09:01Z [verbose] Readiness Indicator file check\\\\n2026-01-30T00:09:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.108131 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5780df1fee00b2c2d8653b44046f36214d66f32ce87ebeceea15fc88e8302dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0cf4b54e0f083169f45556dc42e2c9d37585f261b5d349b206bfd0b206a78988\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:28Z\\\",\\\"message\\\":\\\" for *v1.Pod openshift-machine-config-operator/machine-config-daemon-bmd5j after 0 failed attempt(s)\\\\nI0130 00:09:28.122154 6511 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-bmd5j\\\\nI0130 00:09:28.122141 6511 lb_config.go:1031] Cluster endpoints for openshift-marketplace/redhat-operators for network=default are: map[]\\\\nI0130 00:09:28.121751 6511 services_controller.go:434] Service openshift-machine-config-operator/machine-config-operator retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-config-operator openshift-machine-config-operator 8bc1afc2-8724-4135-84df-aee09f23af4c 4514 0 2025-02-23 05:12:24 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-operator] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mco-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc00796b2fb \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Na\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780df1fee00b2c2d8653b44046f36214d66f32ce87ebeceea15fc88e8302dc9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:10:00Z\\\",\\\"message\\\":\\\"gressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 00:10:00.139104 6963 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 00:10:00.139310 6963 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0130 00:10:00.139396 6963 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0130 00:10:00.139412 6963 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 00:10:00.139441 6963 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 00:10:00.139595 6963 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 00:10:00.139992 6963 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 00:10:00.140324 6963 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.130268 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 02:52:39.030778319 +0000 UTC
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.156010 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.156055 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.156067 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.156085 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.156099 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:01Z","lastTransitionTime":"2026-01-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.259906 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.259973 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.259991 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.260019 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.260039 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:01Z","lastTransitionTime":"2026-01-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.363280 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.363351 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.363383 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.363410 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.363424 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:01Z","lastTransitionTime":"2026-01-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.466665 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.466715 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.466734 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.466764 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.466822 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:01Z","lastTransitionTime":"2026-01-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.570130 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.570216 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.570241 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.570272 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.570294 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:01Z","lastTransitionTime":"2026-01-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.673727 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.673791 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.673803 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.673822 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.673833 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:01Z","lastTransitionTime":"2026-01-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.726479 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hwpvs_147e5e96-db98-498f-b4a4-927d73cb5db5/ovnkube-controller/3.log"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.728994 4885 scope.go:117] "RemoveContainer" containerID="5780df1fee00b2c2d8653b44046f36214d66f32ce87ebeceea15fc88e8302dc9"
Jan 30 00:10:01 crc kubenswrapper[4885]: E0130 00:10:01.729140 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.739745 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.751006 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20bcaeae22fbe2c1ed3aae71f5a6ef7ed50b90d1485e6a793bafd4e72ab0453\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e676f6c9a7795d8f3a7c8ec94db7201104c58cb83d213dc3f5bdcd47ca803b4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pdvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.766038 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hg2nk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313f7566-bae9-4b9c-8c30-9e3c7aef8364\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hg2nk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 
2025-08-24T17:21:41Z" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.776543 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.776608 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.776618 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.776641 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.776653 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:01Z","lastTransitionTime":"2026-01-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.779707 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5326ca3e-fe69-4a11-9359-5c1d583d8ea5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7509c2908aaef44bb980031051612087897581d2fab9d59af0198902e221704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://822e1d88a470745d8f5a094c2a9e352038fee6104ac7a3d61fb5f130d8b4abad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d
793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://822e1d88a470745d8f5a094c2a9e352038fee6104ac7a3d61fb5f130d8b4abad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.822485 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\
",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b2387071035bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"
state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.849974 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.867203 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.867280 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.867303 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.867332 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.867350 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:01Z","lastTransitionTime":"2026-01-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.876667 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5780df1fee00b2c2d8653b44046f36214d66f32ce87ebeceea15fc88e8302dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780df1fee00b2c2d8653b44046f36214d66f32ce87ebeceea15fc88e8302dc9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:10:00Z\\\",\\\"message\\\":\\\"gressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 00:10:00.139104 6963 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 00:10:00.139310 6963 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0130 00:10:00.139396 6963 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0130 00:10:00.139412 6963 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 00:10:00.139441 6963 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 00:10:00.139595 6963 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 00:10:00.139992 6963 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 00:10:00.140324 6963 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:01 crc kubenswrapper[4885]: E0130 00:10:01.882798 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.886884 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.886930 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.886943 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.886962 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.886972 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:01Z","lastTransitionTime":"2026-01-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.887422 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running
\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:01 crc kubenswrapper[4885]: E0130 00:10:01.898834 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.899502 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.902087 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.902128 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.902142 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.902165 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.902179 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:01Z","lastTransitionTime":"2026-01-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.914247 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:01 crc kubenswrapper[4885]: E0130 00:10:01.915829 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.919548 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.919597 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.919613 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.919637 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.919653 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:01Z","lastTransitionTime":"2026-01-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.927836 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d609ef0f63a56e8d8c04c39f0d20715f8c0e42915ccf9a7fdfd1bec2a35dadc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:46Z\\\",\\\"message\\\":\\\"2026-01-30T00:09:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ea062cd3-7160-4bb2-b1e2-4ca523b0f07b\\\\n2026-01-30T00:09:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ea062cd3-7160-4bb2-b1e2-4ca523b0f07b to /host/opt/cni/bin/\\\\n2026-01-30T00:09:01Z [verbose] multus-daemon started\\\\n2026-01-30T00:09:01Z [verbose] Readiness Indicator file check\\\\n2026-01-30T00:09:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:01 crc kubenswrapper[4885]: E0130 00:10:01.931894 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.935457 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.935483 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.935494 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.935517 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.935529 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:01Z","lastTransitionTime":"2026-01-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.941326 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:01 crc kubenswrapper[4885]: E0130 00:10:01.950966 4885 kubelet_node_status.go:585] "Error updating node status, will retry" 
err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329b
a568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\
\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:01 crc kubenswrapper[4885]: E0130 00:10:01.951102 4885 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.952705 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.952758 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.952804 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.952831 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.952850 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:01Z","lastTransitionTime":"2026-01-30T00:10:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.956941 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.970094 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.986580 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:01 crc kubenswrapper[4885]: I0130 00:10:01.997330 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:01Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.007063 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:02Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.017973 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76b3c14d-318a-4269-a669-be91a3a30425\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75325616731f47d3b1ac4f319f5190066c26fd2b2a2b20360bf25bfba831ff21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0896487baf92f89c8128ccac2ecc8ababb15c58c7cfe31432de7fe9095236e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://87b921c6d7b3ea99ae7c08fdc3e0a2b5ae0ec300c5ca5f0be5fb8de83d620537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:02Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.030546 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:02Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.055219 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.055280 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.055304 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.055331 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.055356 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:02Z","lastTransitionTime":"2026-01-30T00:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.131421 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 04:07:50.66277648 +0000 UTC Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.141276 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.141350 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.141375 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:10:02 crc kubenswrapper[4885]: E0130 00:10:02.141519 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:10:02 crc kubenswrapper[4885]: E0130 00:10:02.141698 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.142003 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:10:02 crc kubenswrapper[4885]: E0130 00:10:02.142093 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:10:02 crc kubenswrapper[4885]: E0130 00:10:02.142374 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.157738 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.157812 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.157829 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.157856 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.157872 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:02Z","lastTransitionTime":"2026-01-30T00:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.260999 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.261057 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.261070 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.261092 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.261104 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:02Z","lastTransitionTime":"2026-01-30T00:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.363883 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.364298 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.364451 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.364614 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.364760 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:02Z","lastTransitionTime":"2026-01-30T00:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.467982 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.468078 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.468102 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.468448 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.468473 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:02Z","lastTransitionTime":"2026-01-30T00:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.571914 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.571992 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.572016 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.572052 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.572080 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:02Z","lastTransitionTime":"2026-01-30T00:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.675071 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.675146 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.675170 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.675202 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.675222 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:02Z","lastTransitionTime":"2026-01-30T00:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.777807 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.777857 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.777870 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.777892 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.777906 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:02Z","lastTransitionTime":"2026-01-30T00:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.881105 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.881172 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.881189 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.881211 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.881226 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:02Z","lastTransitionTime":"2026-01-30T00:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.984227 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.984274 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.984285 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.984304 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:02 crc kubenswrapper[4885]: I0130 00:10:02.984315 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:02Z","lastTransitionTime":"2026-01-30T00:10:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.087561 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.087626 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.087644 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.087673 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.087691 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:03Z","lastTransitionTime":"2026-01-30T00:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.132296 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 16:32:31.099777023 +0000 UTC
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.190930 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.190993 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.191012 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.191036 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.191054 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:03Z","lastTransitionTime":"2026-01-30T00:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.294645 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.295029 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.295138 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.295254 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.295345 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:03Z","lastTransitionTime":"2026-01-30T00:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.398529 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.398583 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.398600 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.398627 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.398643 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:03Z","lastTransitionTime":"2026-01-30T00:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.502835 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.502918 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.502941 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.502978 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.503013 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:03Z","lastTransitionTime":"2026-01-30T00:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.606065 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.606123 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.606140 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.606168 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.606187 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:03Z","lastTransitionTime":"2026-01-30T00:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.709373 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.709449 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.709472 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.709507 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.709531 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:03Z","lastTransitionTime":"2026-01-30T00:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.812586 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.812661 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.812675 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.812707 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.812721 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:03Z","lastTransitionTime":"2026-01-30T00:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.916141 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.916293 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.916372 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.916465 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:03 crc kubenswrapper[4885]: I0130 00:10:03.916498 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:03Z","lastTransitionTime":"2026-01-30T00:10:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.020466 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.020523 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.020535 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.020557 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.020571 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:04Z","lastTransitionTime":"2026-01-30T00:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.124488 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.124559 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.124582 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.124614 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.124636 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:04Z","lastTransitionTime":"2026-01-30T00:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.132810 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 22:49:11.790600882 +0000 UTC
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.141219 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.141256 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk"
Jan 30 00:10:04 crc kubenswrapper[4885]: E0130 00:10:04.141413 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.141495 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.141540 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 00:10:04 crc kubenswrapper[4885]: E0130 00:10:04.141716 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364"
Jan 30 00:10:04 crc kubenswrapper[4885]: E0130 00:10:04.142118 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 00:10:04 crc kubenswrapper[4885]: E0130 00:10:04.142332 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.160497 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:04Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.179941 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20bcaeae22fbe2c1ed3aae71f5a6ef7ed50b90d1485e6a793bafd4e72ab0453\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e676f6c9a7795d8f3a7c8ec94db7201104c58cb83d213dc3f5bdcd47ca803b4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pdvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:04Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.195147 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hg2nk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313f7566-bae9-4b9c-8c30-9e3c7aef8364\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hg2nk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:04Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.210416 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5326ca3e-fe69-4a11-9359-5c1d583d8ea5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7509c2908aaef44bb980031051612087897581d2fab9d59af0198902e221704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://822e1d88a470745d8f5a094c2a9e352038fee6104ac7a3d61fb5f130d8b4abad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://822e1d88a470745d8f5a094c2a9e352038fee6104ac7a3d61fb5f130d8b4abad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:04Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.227967 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.228028 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.228048 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.228082 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.228107 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:04Z","lastTransitionTime":"2026-01-30T00:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.245616 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b2387071035bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:04Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.266999 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:04Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.302806 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5780df1fee00b2c2d8653b44046f36214d66f32ce87ebeceea15fc88e8302dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780df1fee00b2c2d8653b44046f36214d66f32ce87ebeceea15fc88e8302dc9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:10:00Z\\\",\\\"message\\\":\\\"gressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 00:10:00.139104 6963 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 00:10:00.139310 6963 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0130 00:10:00.139396 6963 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0130 00:10:00.139412 6963 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 00:10:00.139441 6963 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 00:10:00.139595 6963 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 00:10:00.139992 6963 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 00:10:00.140324 6963 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:04Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.319368 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:04Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.330429 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.330469 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.330481 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.330504 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.330518 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:04Z","lastTransitionTime":"2026-01-30T00:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.334306 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:04Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.350153 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.367696 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d609ef0f63a56e8d8c04c39f0d20715f8c0e42915ccf9a7fdfd1bec2a35dadc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:46Z\\\",\\\"message\\\":\\\"2026-01-30T00:09:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ea062cd3-7160-4bb2-b1e2-4ca523b0f07b\\\\n2026-01-30T00:09:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ea062cd3-7160-4bb2-b1e2-4ca523b0f07b to /host/opt/cni/bin/\\\\n2026-01-30T00:09:01Z [verbose] multus-daemon started\\\\n2026-01-30T00:09:01Z [verbose] Readiness Indicator file check\\\\n2026-01-30T00:09:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.384247 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.400112 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.419184 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.434481 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.434727 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.434822 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.434902 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.434964 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:04Z","lastTransitionTime":"2026-01-30T00:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.435337 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.448707 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.464215 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.482228 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76b3c14d-318a-4269-a669-be91a3a30425\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75325616731f47d3b1ac4f319f5190066c26fd2b2a2b20360bf25bfba831ff21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0896487baf92f89c8128ccac2ecc8ababb15c58c7cfe31432de7fe9095236e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://87b921c6d7b3ea99ae7c08fdc3e0a2b5ae0ec300c5ca5f0be5fb8de83d620537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.502353 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:04Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.537256 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.537558 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.537642 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.537747 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.537858 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:04Z","lastTransitionTime":"2026-01-30T00:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.641533 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.641928 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.642020 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.642128 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.642268 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:04Z","lastTransitionTime":"2026-01-30T00:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.745183 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.745254 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.745273 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.745303 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.745323 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:04Z","lastTransitionTime":"2026-01-30T00:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.848122 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.848196 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.848215 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.848244 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:04 crc kubenswrapper[4885]: I0130 00:10:04.848263 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:04Z","lastTransitionTime":"2026-01-30T00:10:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 30 00:10:05 crc kubenswrapper[4885]: I0130 00:10:05.133561 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 12:11:48.769623894 +0000 UTC
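This and the four later certificate_manager.go entries each report a different rotation deadline for the same fixed expiry, which suggests the deadline is re-drawn on every evaluation as a random point late in the certificate's validity window. A minimal Python sketch of that idea, assuming a uniform draw over the 70-90% band of the lifetime; the band and the NotBefore value are assumptions for illustration, not quotes of the kubelet/client-go implementation.

import random
from datetime import datetime, timedelta, timezone

def rotation_deadline(not_before, not_after):
    # Pick a jittered point late in the validity window (assumed 70-90%).
    total = not_after - not_before
    fraction = random.uniform(0.7, 0.9)
    return not_before + timedelta(seconds=total.total_seconds() * fraction)

not_after = datetime(2026, 2, 24, 5, 53, 3, tzinfo=timezone.utc)  # expiry from the log
not_before = not_after - timedelta(days=365)                      # assumed issue time
for _ in range(3):
    print(rotation_deadline(not_before, not_after))  # a different deadline each draw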
Jan 30 00:10:06 crc kubenswrapper[4885]: I0130 00:10:06.134164 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 02:38:40.565941069 +0000 UTC
Jan 30 00:10:06 crc kubenswrapper[4885]: I0130 00:10:06.142130 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk"
Jan 30 00:10:06 crc kubenswrapper[4885]: I0130 00:10:06.142202 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 00:10:06 crc kubenswrapper[4885]: I0130 00:10:06.142289 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 00:10:06 crc kubenswrapper[4885]: E0130 00:10:06.142330 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364"
Jan 30 00:10:06 crc kubenswrapper[4885]: I0130 00:10:06.142305 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 00:10:06 crc kubenswrapper[4885]: E0130 00:10:06.142522 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 00:10:06 crc kubenswrapper[4885]: E0130 00:10:06.142702 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 00:10:06 crc kubenswrapper[4885]: E0130 00:10:06.142991 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
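Every NotReady heartbeat and every "Error syncing pod" entry above traces back to one condition: no CNI configuration file under /etc/kubernetes/cni/net.d/. A rough stdlib-only Python sketch of that directory check follows; the extension list mirrors common CNI config loaders and is an assumption, not the exact CRI-O lookup.

from pathlib import Path

def find_cni_configs(net_dir="/etc/kubernetes/cni/net.d"):
    # Return CNI network configs in the runtime's config directory,
    # if any; an empty result corresponds to the log's
    # "no CNI configuration file" condition.
    d = Path(net_dir)
    if not d.is_dir():
        return []
    exts = {".conf", ".conflist", ".json"}  # assumed extension set
    return sorted(p for p in d.iterdir() if p.suffix in exts)

configs = find_cni_configs()
if not configs:
    print("no CNI configuration file found; network plugin not ready")
else:
    print("CNI configs:", *configs)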
Jan 30 00:10:07 crc kubenswrapper[4885]: I0130 00:10:07.134603 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 06:46:15.468824679 +0000 UTC
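Each kubelet entry in this excerpt carries a klog header behind the journald prefix: a severity letter fused with MMDD, the wall-clock time, the PID, the source file:line, then the message. A small regex sketch for splitting one such line into fields; the pattern is a best-effort reading of the format seen here, not a canonical klog grammar.

import re

KLOG = re.compile(
    r"(?P<sev>[IWEF])(?P<month>\d{2})(?P<day>\d{2}) "
    r"(?P<time>\d{2}:\d{2}:\d{2}\.\d+)\s+(?P<pid>\d+)\s+"
    r"(?P<src>[^:\s]+):(?P<line>\d+)\] (?P<msg>.*)"
)

raw = ("Jan 30 00:10:07 crc kubenswrapper[4885]: I0130 00:10:07.134603 "
       "4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: "
       "Certificate expiration is 2026-02-24 05:53:03 +0000 UTC")

m = KLOG.search(raw)                  # search() skips the journald prefix
print(m["sev"], m["src"], m["line"])  # -> I certificate_manager.go 356
print(m["msg"])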
Jan 30 00:10:08 crc kubenswrapper[4885]: I0130 00:10:08.135580 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 05:42:49.735048375 +0000 UTC
Jan 30 00:10:08 crc kubenswrapper[4885]: I0130 00:10:08.140955 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 00:10:08 crc kubenswrapper[4885]: I0130 00:10:08.141029 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 00:10:08 crc kubenswrapper[4885]: I0130 00:10:08.141022 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 00:10:08 crc kubenswrapper[4885]: I0130 00:10:08.140955 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk"
Jan 30 00:10:08 crc kubenswrapper[4885]: E0130 00:10:08.141134 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 00:10:08 crc kubenswrapper[4885]: E0130 00:10:08.141230 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 00:10:08 crc kubenswrapper[4885]: E0130 00:10:08.141533 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364"
Jan 30 00:10:08 crc kubenswrapper[4885]: E0130 00:10:08.141596 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.135976 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 09:16:20.463473636 +0000 UTC
Has your network provider started?"} Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.405680 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.405760 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.405835 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.405876 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.405905 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:09Z","lastTransitionTime":"2026-01-30T00:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.509507 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.509582 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.509605 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.509640 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.509666 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:09Z","lastTransitionTime":"2026-01-30T00:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.613590 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.613665 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.613684 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.613713 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.613732 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:09Z","lastTransitionTime":"2026-01-30T00:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.717399 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.717454 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.717468 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.717488 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.717501 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:09Z","lastTransitionTime":"2026-01-30T00:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.820750 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.820848 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.820871 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.820905 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.820931 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:09Z","lastTransitionTime":"2026-01-30T00:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.925077 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.925163 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.925188 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.925221 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:09 crc kubenswrapper[4885]: I0130 00:10:09.925245 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:09Z","lastTransitionTime":"2026-01-30T00:10:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.028909 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.028977 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.028998 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.029031 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.029055 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:10Z","lastTransitionTime":"2026-01-30T00:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.131711 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.131795 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.131805 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.131825 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.131835 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:10Z","lastTransitionTime":"2026-01-30T00:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.136852 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 22:50:12.239480246 +0000 UTC
Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.141269 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk"
Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.141296 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.141269 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 00:10:10 crc kubenswrapper[4885]: E0130 00:10:10.141471 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364"
Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.141521 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 00:10:10 crc kubenswrapper[4885]: E0130 00:10:10.141605 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 30 00:10:10 crc kubenswrapper[4885]: E0130 00:10:10.141764 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 30 00:10:10 crc kubenswrapper[4885]: E0130 00:10:10.141903 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.234814 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.234868 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.234877 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.234895 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.234905 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:10Z","lastTransitionTime":"2026-01-30T00:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.336959 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.337014 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.337030 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.337054 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.337070 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:10Z","lastTransitionTime":"2026-01-30T00:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.441189 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.441255 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.441275 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.441306 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.441330 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:10Z","lastTransitionTime":"2026-01-30T00:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.544136 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.544164 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.544174 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.544192 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.544201 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:10Z","lastTransitionTime":"2026-01-30T00:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.646750 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.646834 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.646846 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.646867 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.646880 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:10Z","lastTransitionTime":"2026-01-30T00:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.749896 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.749975 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.749997 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.750030 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.750053 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:10Z","lastTransitionTime":"2026-01-30T00:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.853758 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.853858 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.853885 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.853920 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.853942 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:10Z","lastTransitionTime":"2026-01-30T00:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.957625 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.957684 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.957697 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.957721 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:10 crc kubenswrapper[4885]: I0130 00:10:10.957736 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:10Z","lastTransitionTime":"2026-01-30T00:10:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.060561 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.060628 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.060643 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.060669 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.060686 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:11Z","lastTransitionTime":"2026-01-30T00:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.137485 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 15:24:53.469319659 +0000 UTC Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.163998 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.164040 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.164053 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.164074 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.164088 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:11Z","lastTransitionTime":"2026-01-30T00:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.267579 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.267627 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.267637 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.267653 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.267663 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:11Z","lastTransitionTime":"2026-01-30T00:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.371358 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.371411 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.371423 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.371445 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.371460 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:11Z","lastTransitionTime":"2026-01-30T00:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.474439 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.474500 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.474515 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.474544 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.474562 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:11Z","lastTransitionTime":"2026-01-30T00:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.578162 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.578311 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.578337 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.578368 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.578388 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:11Z","lastTransitionTime":"2026-01-30T00:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.681100 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.681441 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.681460 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.681492 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.681511 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:11Z","lastTransitionTime":"2026-01-30T00:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.784024 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.784174 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.784201 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.784229 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.784247 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:11Z","lastTransitionTime":"2026-01-30T00:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.887738 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.887840 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.887864 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.887896 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.887917 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:11Z","lastTransitionTime":"2026-01-30T00:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.990144 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.990905 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.990956 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.990987 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.991005 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:11Z","lastTransitionTime":"2026-01-30T00:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.993132 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.993164 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.993178 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.993199 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:11 crc kubenswrapper[4885]: I0130 00:10:11.993216 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:11Z","lastTransitionTime":"2026-01-30T00:10:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:12 crc kubenswrapper[4885]: E0130 00:10:12.012077 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:12Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.017416 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.017457 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.017469 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.017490 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.017502 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:12Z","lastTransitionTime":"2026-01-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:12 crc kubenswrapper[4885]: E0130 00:10:12.037530 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:12Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.042307 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.042348 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.042361 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.042382 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.042396 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:12Z","lastTransitionTime":"2026-01-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:12 crc kubenswrapper[4885]: E0130 00:10:12.056920 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:12Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.060578 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.060617 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.060629 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.060650 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.060664 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:12Z","lastTransitionTime":"2026-01-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:12 crc kubenswrapper[4885]: E0130 00:10:12.075807 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:12Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.079841 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.079881 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.079893 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.079915 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.079929 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:12Z","lastTransitionTime":"2026-01-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:12 crc kubenswrapper[4885]: E0130 00:10:12.097370 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:12Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:12 crc kubenswrapper[4885]: E0130 00:10:12.097589 4885 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.100522 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
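Every status patch in this burst fails on the same hop: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 serves a certificate that expired on 2025-08-24, while the node clock reads 2026-01-30, so the kubelet exhausts its fixed retry budget (nodeStatusUpdateRetry, 5 attempts in the upstream kubelet) and logs "update node status exceeds retry count" until the next sync. A minimal Python sketch to confirm the expiry from the node follows; the endpoint comes from the log above, while the third-party cryptography package and its not_valid_after_utc accessor (cryptography >= 42) are assumptions:

    import ssl
    from datetime import datetime, timezone

    from cryptography import x509  # third-party; pip install cryptography

    # Endpoint taken from the failing Post in the entries above.
    host, port = "127.0.0.1", 9743

    # get_server_certificate() fetches the leaf without verifying the chain,
    # so it works even though the certificate is expired.
    pem = ssl.get_server_certificate((host, port))
    cert = x509.load_pem_x509_certificate(pem.encode())

    now = datetime.now(timezone.utc)
    print("subject: ", cert.subject.rfc4514_string())
    print("notAfter:", cert.not_valid_after_utc)  # expect 2025-08-24T17:21:41Z
    print("expired: ", now > cert.not_valid_after_utc)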
event="NodeHasSufficientMemory" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.100569 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.100580 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.100599 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.100610 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:12Z","lastTransitionTime":"2026-01-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.138593 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 11:35:55.040854451 +0000 UTC Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.141068 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.141110 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.141111 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.141079 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:10:12 crc kubenswrapper[4885]: E0130 00:10:12.141281 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:10:12 crc kubenswrapper[4885]: E0130 00:10:12.141496 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:10:12 crc kubenswrapper[4885]: E0130 00:10:12.141569 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:10:12 crc kubenswrapper[4885]: E0130 00:10:12.141671 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.202998 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.203062 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.203082 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.203114 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.203135 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:12Z","lastTransitionTime":"2026-01-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.309843 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.310941 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.310992 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.311026 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.311048 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:12Z","lastTransitionTime":"2026-01-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.414080 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.414137 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.414150 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.414174 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.414191 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:12Z","lastTransitionTime":"2026-01-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.517646 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.517707 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.517730 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.517766 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.517850 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:12Z","lastTransitionTime":"2026-01-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.621208 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.621255 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.621265 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.621283 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.621293 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:12Z","lastTransitionTime":"2026-01-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.724122 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.724224 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.724242 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.724266 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.724283 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:12Z","lastTransitionTime":"2026-01-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.827906 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.827970 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.827982 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.828003 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.828017 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:12Z","lastTransitionTime":"2026-01-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.931117 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.931175 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.931187 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.931209 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:12 crc kubenswrapper[4885]: I0130 00:10:12.931225 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:12Z","lastTransitionTime":"2026-01-30T00:10:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.034110 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.034158 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.034170 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.034192 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.034206 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:13Z","lastTransitionTime":"2026-01-30T00:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.137905 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.137977 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.137995 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.138027 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.138044 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:13Z","lastTransitionTime":"2026-01-30T00:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.138935 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 08:29:30.166528494 +0000 UTC Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.241352 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.241424 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.241442 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.241474 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.241499 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:13Z","lastTransitionTime":"2026-01-30T00:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.345351 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.345956 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.346010 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.346048 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.346073 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:13Z","lastTransitionTime":"2026-01-30T00:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.448723 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.448812 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.448847 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.448888 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.448912 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:13Z","lastTransitionTime":"2026-01-30T00:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.551969 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.552106 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.552168 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.552197 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.552214 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:13Z","lastTransitionTime":"2026-01-30T00:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.655094 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.655154 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.655171 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.655198 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.655216 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:13Z","lastTransitionTime":"2026-01-30T00:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.759068 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.759124 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.759137 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.759159 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.759176 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:13Z","lastTransitionTime":"2026-01-30T00:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.862033 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.862297 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.862380 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.862422 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.862446 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:13Z","lastTransitionTime":"2026-01-30T00:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.965885 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.965964 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.965983 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.966016 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:13 crc kubenswrapper[4885]: I0130 00:10:13.966037 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:13Z","lastTransitionTime":"2026-01-30T00:10:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.070476 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.070550 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.070568 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.070598 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.070619 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:14Z","lastTransitionTime":"2026-01-30T00:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.139592 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 13:09:21.513600112 +0000 UTC Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.140951 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.141034 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:14 crc kubenswrapper[4885]: E0130 00:10:14.141156 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.141058 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.140967 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:10:14 crc kubenswrapper[4885]: E0130 00:10:14.141316 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:10:14 crc kubenswrapper[4885]: E0130 00:10:14.141416 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:10:14 crc kubenswrapper[4885]: E0130 00:10:14.141534 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.165469 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"147e5e96-db98-498f-b4a4-927d73cb5db5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5780df1fee00b2c2d8653b44046f36214d66f32c
e87ebeceea15fc88e8302dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780df1fee00b2c2d8653b44046f36214d66f32ce87ebeceea15fc88e8302dc9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:10:00Z\\\",\\\"message\\\":\\\"gressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 00:10:00.139104 6963 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0130 00:10:00.139310 6963 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0130 00:10:00.139396 6963 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0130 00:10:00.139412 6963 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 00:10:00.139441 6963 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 00:10:00.139595 6963 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 00:10:00.139992 6963 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0130 00:10:00.140324 6963 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dhwkm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-hwpvs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.175890 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.175962 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.175987 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.176019 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.176041 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:14Z","lastTransitionTime":"2026-01-30T00:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.179904 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"41b99e9c-eadb-404c-9596-1b102ac85157\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6952b380ef817e1d3b7337f60ea6b8ce855239133290cd00dc8681a6e6d559c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nq7jl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bmd5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.194461 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.204081 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-4t96d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7dcff61-ca91-42c4-83dc-2a502099dff1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2cac95011421490621bc7c65b19e0533b79c245b15fd13e430847aae510712a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vg7zc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-4t96d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.221052 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-xmv9h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f11e547-11fd-417a-be4a-e4f37d8e7839\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d609ef0f63a56e8d8c04c39f0d20715f8c0e42915ccf9a7fdfd1bec2a35dadc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-30T00:09:46Z\\\",\\\"message\\\":\\\"2026-01-30T00:09:01+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ea062cd3-7160-4bb2-b1e2-4ca523b0f07b\\\\n2026-01-30T00:09:01+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ea062cd3-7160-4bb2-b1e2-4ca523b0f07b to /host/opt/cni/bin/\\\\n2026-01-30T00:09:01Z [verbose] multus-daemon started\\\\n2026-01-30T00:09:01Z [verbose] Readiness Indicator file check\\\\n2026-01-30T00:09:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gxj7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-xmv9h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.239808 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-72hlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b01f0fb2-4c71-437b-9ac2-5ca44830f3a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04e33c0397f8aa7df5a60b14fd05d977262562f8829fb8da756115aa1e04044d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26aac3153c42ac1b77b09287120e222fd94afa156e7da3081ad4b3a58103b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://178fca9886f1f6c30f5ef1fdf3f9d64ce219eebf4903c4f0062f9853a47f9590\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f0878a934b674719385f867e8fc75e466c7a8b90c328e731193cee2b2aa7e6bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b549a8daeb6bf5494163ac530a0c475950909a13973ba91aa30223e560e4587\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a149c09c7747ca9773678579b04b9cde13a11d76b4e50ef149bdc4a2d6168e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a980d6e8a6673247b478c68745728b04d387c4ed75d3beeafdfcbd89d04f3d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:09:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:09:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5pbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-72hlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.255808 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"57dad425-1427-4159-b1dc-4991186f30f8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0130 00:08:47.851903 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0130 00:08:47.855137 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3198168842/tls.crt::/tmp/serving-cert-3198168842/tls.key\\\\\\\"\\\\nI0130 00:08:53.543230 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0130 00:08:53.548847 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0130 00:08:53.548871 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0130 00:08:53.548894 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0130 00:08:53.548901 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0130 00:08:53.554708 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0130 00:08:53.554792 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554803 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0130 00:08:53.554813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0130 00:08:53.554819 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0130 00:08:53.554828 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0130 00:08:53.554836 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0130 00:08:53.554885 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0130 00:08:53.559231 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.270724 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.278630 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.278674 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.278686 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.278704 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.278717 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:14Z","lastTransitionTime":"2026-01-30T00:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.291237 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73d271f34fa32f8cd79f4120f455dc4ed6b1da701ef60b1e8f2b5f0ad10e3b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac3bc994dc385c609db2d39e29707bc98dc12ecf2d0ba761c5e57c16ba30edae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.307792 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c55b109cab483ddd5e1da79ff5494f80e192dfa3c6b6c301d8c668153742e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.322568 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"25ae5831-29e5-4c83-bdde-61beb2c720b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d35a7861408b8af5f39440ce067caf6695ab4ab5619f9e938336f4dc2db2e693\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d6da06955e7029112ba8c7226c6b6d84d5da2886a3ba4a537986350f00b73f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac9c72734240c803e3063641ebacb511d647313e431c311816f836a770caac7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.335386 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76b3c14d-318a-4269-a669-be91a3a30425\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75325616731f47d3b1ac4f319f5190066c26fd2b2a2b20360bf25bfba831ff21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0896487baf92f89c8128ccac2ecc8ababb15c58c7cfe31432de7fe9095236e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://87b921c6d7b3ea99ae7c08fdc3e0a2b5ae0ec300c5ca5f0be5fb8de83d620537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fad5c35eb90ef8c48d77a1c7a89bacc5492f4d40f0cbcc5f3530fc3bbe2399\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.351619 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97b349001db75720b6417f1ccecd59dea515280b412abbb776dd0d643914515d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.365432 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-zrbl7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f93c3da6-7e48-4079-9673-455594d63c9b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c608d6eb4a7254eab873177d4cebf56e8b94010eed4bd524691ca6df079d216d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gn6dq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:58Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-zrbl7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.378176 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b25dd46-9353-45e2-86c3-ba3cdb6592e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20bcaeae22fbe2c1ed3aae71f5a6ef7ed50b90d1485e6a793bafd4e72ab0453\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e676f6c9a7795d8f3a7c8ec94db7201104c58cb83d213dc3f5bdcd47ca803b4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:09:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l68rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pdvgr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:14Z is after 2025-08-24T17:21:41Z" Jan 30 
00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.380876 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.380906 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.380915 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.380933 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.380943 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:14Z","lastTransitionTime":"2026-01-30T00:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.392489 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-hg2nk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"313f7566-bae9-4b9c-8c30-9e3c7aef8364\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:09:13Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dfbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:09:13Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-hg2nk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.406962 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5326ca3e-fe69-4a11-9359-5c1d583d8ea5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7509c2908aaef44bb980031051612087897581d2fab9d59af0198902e221704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://822e1d88a470745d8f5a094c2a9e352038fee6104ac7a3d61fb5f130d8b4abad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://822e1d88a470745d8f5a094c2a9e352038fee6104ac7a3d61fb5f130d8b4abad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.430516 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d6c5602-b084-419d-ab24-12927e0f5d35\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c2eead3e1329e6838944807912851c0f8d093391427450d31c026faf8967ff9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea05bf53b99d1d19cba7fbf3c08d6e9515e837e5556f91f8c23df1646a6d358a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://048b71739b94c38d6403aed63eaade0d3242205f49b388908ab9ec0c50adbb37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03edc88afb04af3319b975595afc4b238707103
5bf83c49979395b3d6f0eab8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e5b7c43839605c35bb9f6ce9c3c5dfb1bd9479ca167fbfa4f77ae5486323ffa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-30T00:08:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://730081218f34e114668ba972afe947161b293a0b89bb36e85c8b8e0235d50327\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a2916a32eb258eb9e0055f658a2295c86b1f5f381b5424146bb33d49eecba1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32c6ef5af5a0b7476e71048c3f5032c484d7c6dac412cd2dfb2084b324964716\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-30T00:08:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-30T00:08:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-30T00:08:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:14Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.446746 4885 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-30T00:08:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:14Z is after 2025-08-24T17:21:41Z"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.483901 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.483942 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.483957 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.483981 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.483996 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:14Z","lastTransitionTime":"2026-01-30T00:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.587669 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.587745 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.587885 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.587918 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.587936 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:14Z","lastTransitionTime":"2026-01-30T00:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.691709 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.691801 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.691820 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.691843 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.691857 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:14Z","lastTransitionTime":"2026-01-30T00:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.794488 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.794538 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.794576 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.794600 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.794614 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:14Z","lastTransitionTime":"2026-01-30T00:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.897885 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.897952 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.897972 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.898002 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:14 crc kubenswrapper[4885]: I0130 00:10:14.898024 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:14Z","lastTransitionTime":"2026-01-30T00:10:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.000761 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.000905 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.000934 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.000970 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.000993 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:15Z","lastTransitionTime":"2026-01-30T00:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.103902 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.103974 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.103999 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.104032 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.104055 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:15Z","lastTransitionTime":"2026-01-30T00:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.140758 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 02:13:20.160375024 +0000 UTC Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.142378 4885 scope.go:117] "RemoveContainer" containerID="5780df1fee00b2c2d8653b44046f36214d66f32ce87ebeceea15fc88e8302dc9" Jan 30 00:10:15 crc kubenswrapper[4885]: E0130 00:10:15.142663 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.206556 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.206672 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.206698 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.206821 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.206854 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:15Z","lastTransitionTime":"2026-01-30T00:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.310247 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.310306 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.310325 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.310353 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.310370 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:15Z","lastTransitionTime":"2026-01-30T00:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.416116 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.416175 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.416192 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.416222 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.416241 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:15Z","lastTransitionTime":"2026-01-30T00:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.519507 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.519597 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.519631 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.519667 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.519687 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:15Z","lastTransitionTime":"2026-01-30T00:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.622812 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.622871 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.622886 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.623001 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.623021 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:15Z","lastTransitionTime":"2026-01-30T00:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.726191 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.726244 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.726263 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.726297 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.726316 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:15Z","lastTransitionTime":"2026-01-30T00:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.833454 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.833609 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.834482 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.834539 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.834561 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:15Z","lastTransitionTime":"2026-01-30T00:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.937731 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.937830 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.937851 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.937880 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:15 crc kubenswrapper[4885]: I0130 00:10:15.937901 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:15Z","lastTransitionTime":"2026-01-30T00:10:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.057088 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.057134 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.057145 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.057169 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.057182 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:16Z","lastTransitionTime":"2026-01-30T00:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.141086 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 19:42:57.154767983 +0000 UTC Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.141252 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.141291 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.141316 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.141316 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:10:16 crc kubenswrapper[4885]: E0130 00:10:16.141389 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:10:16 crc kubenswrapper[4885]: E0130 00:10:16.141524 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:10:16 crc kubenswrapper[4885]: E0130 00:10:16.141569 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:10:16 crc kubenswrapper[4885]: E0130 00:10:16.141920 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.159237 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.159289 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.159306 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.159329 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.159349 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:16Z","lastTransitionTime":"2026-01-30T00:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.262906 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.262968 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.262978 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.263051 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.263069 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:16Z","lastTransitionTime":"2026-01-30T00:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.366812 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.366862 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.366874 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.366895 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.366907 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:16Z","lastTransitionTime":"2026-01-30T00:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.470414 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.470465 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.470477 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.470497 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.470511 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:16Z","lastTransitionTime":"2026-01-30T00:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.576758 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.576883 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.576907 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.576943 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.576967 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:16Z","lastTransitionTime":"2026-01-30T00:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.680269 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.680347 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.680366 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.680396 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.680422 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:16Z","lastTransitionTime":"2026-01-30T00:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.784194 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.784266 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.784293 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.784325 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.784349 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:16Z","lastTransitionTime":"2026-01-30T00:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.887163 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.887237 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.887261 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.887290 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.887308 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:16Z","lastTransitionTime":"2026-01-30T00:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.990209 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.990284 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.990306 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.990337 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:16 crc kubenswrapper[4885]: I0130 00:10:16.990399 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:16Z","lastTransitionTime":"2026-01-30T00:10:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.095018 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.095081 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.095094 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.095115 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.095127 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:17Z","lastTransitionTime":"2026-01-30T00:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.141583 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 13:13:21.667733668 +0000 UTC Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.197918 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.197984 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.198001 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.198028 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.198045 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:17Z","lastTransitionTime":"2026-01-30T00:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.301891 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.301956 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.301975 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.302002 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.302019 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:17Z","lastTransitionTime":"2026-01-30T00:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.404623 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.404687 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.404706 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.404737 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.404761 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:17Z","lastTransitionTime":"2026-01-30T00:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.507936 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.508004 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.508030 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.508064 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.508089 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:17Z","lastTransitionTime":"2026-01-30T00:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.611277 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.611322 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.611335 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.611356 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.611370 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:17Z","lastTransitionTime":"2026-01-30T00:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.714707 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.714812 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.714839 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.714875 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.714896 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:17Z","lastTransitionTime":"2026-01-30T00:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.818097 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.818170 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.818195 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.818230 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.818253 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:17Z","lastTransitionTime":"2026-01-30T00:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.849719 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs\") pod \"network-metrics-daemon-hg2nk\" (UID: \"313f7566-bae9-4b9c-8c30-9e3c7aef8364\") " pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:17 crc kubenswrapper[4885]: E0130 00:10:17.850019 4885 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 00:10:17 crc kubenswrapper[4885]: E0130 00:10:17.850164 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs podName:313f7566-bae9-4b9c-8c30-9e3c7aef8364 nodeName:}" failed. No retries permitted until 2026-01-30 00:11:21.850129903 +0000 UTC m=+168.441601681 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs") pod "network-metrics-daemon-hg2nk" (UID: "313f7566-bae9-4b9c-8c30-9e3c7aef8364") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.924502 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.924568 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.924590 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.924624 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:17 crc kubenswrapper[4885]: I0130 00:10:17.924650 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:17Z","lastTransitionTime":"2026-01-30T00:10:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.029018 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.029103 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.029124 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.029151 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.029169 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:18Z","lastTransitionTime":"2026-01-30T00:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.132622 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.132697 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.132721 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.132754 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.132841 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:18Z","lastTransitionTime":"2026-01-30T00:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.141028 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.141080 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:10:18 crc kubenswrapper[4885]: E0130 00:10:18.141251 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.141335 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.141347 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:18 crc kubenswrapper[4885]: E0130 00:10:18.141499 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:10:18 crc kubenswrapper[4885]: E0130 00:10:18.141644 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.141728 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 13:32:21.137003233 +0000 UTC Jan 30 00:10:18 crc kubenswrapper[4885]: E0130 00:10:18.141819 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.235895 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.235958 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.235972 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.235995 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.236010 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:18Z","lastTransitionTime":"2026-01-30T00:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.338478 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.338538 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.338554 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.338580 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.338597 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:18Z","lastTransitionTime":"2026-01-30T00:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.441571 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.441626 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.441644 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.441671 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.441690 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:18Z","lastTransitionTime":"2026-01-30T00:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.544533 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.544599 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.544608 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.544625 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.544634 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:18Z","lastTransitionTime":"2026-01-30T00:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.647554 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.647604 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.647621 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.647648 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.647664 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:18Z","lastTransitionTime":"2026-01-30T00:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.750955 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.751026 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.751044 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.751074 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.751096 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:18Z","lastTransitionTime":"2026-01-30T00:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.853858 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.853909 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.853922 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.853944 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.853957 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:18Z","lastTransitionTime":"2026-01-30T00:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.957385 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.957440 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.957456 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.957483 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:18 crc kubenswrapper[4885]: I0130 00:10:18.957501 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:18Z","lastTransitionTime":"2026-01-30T00:10:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.061070 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.061154 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.061178 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.061218 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.061243 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:19Z","lastTransitionTime":"2026-01-30T00:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.142158 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 18:41:09.008304044 +0000 UTC Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.165241 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.165317 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.165343 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.165377 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.165403 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:19Z","lastTransitionTime":"2026-01-30T00:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.268232 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.268298 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.268318 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.268399 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.268422 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:19Z","lastTransitionTime":"2026-01-30T00:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.372240 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.372293 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.372310 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.372336 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.372355 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:19Z","lastTransitionTime":"2026-01-30T00:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.475596 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.475677 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.475701 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.475728 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.475745 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:19Z","lastTransitionTime":"2026-01-30T00:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.579058 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.579108 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.579120 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.579142 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.579158 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:19Z","lastTransitionTime":"2026-01-30T00:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.682304 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.682391 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.682422 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.682458 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.682481 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:19Z","lastTransitionTime":"2026-01-30T00:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.786542 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.786663 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.786690 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.786726 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.786755 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:19Z","lastTransitionTime":"2026-01-30T00:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.889390 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.889466 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.889487 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.889522 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.889548 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:19Z","lastTransitionTime":"2026-01-30T00:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.992626 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.992677 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.992688 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.992710 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:19 crc kubenswrapper[4885]: I0130 00:10:19.992724 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:19Z","lastTransitionTime":"2026-01-30T00:10:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.095825 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.095887 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.095907 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.095929 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.095943 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:20Z","lastTransitionTime":"2026-01-30T00:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.141618 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.141668 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.141666 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:10:20 crc kubenswrapper[4885]: E0130 00:10:20.141966 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.142056 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:10:20 crc kubenswrapper[4885]: E0130 00:10:20.142981 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.143127 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 02:36:59.558370134 +0000 UTC Jan 30 00:10:20 crc kubenswrapper[4885]: E0130 00:10:20.143163 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:10:20 crc kubenswrapper[4885]: E0130 00:10:20.143287 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.199640 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.199713 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.199732 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.199794 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.199815 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:20Z","lastTransitionTime":"2026-01-30T00:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.303382 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.303426 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.303440 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.303461 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.303480 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:20Z","lastTransitionTime":"2026-01-30T00:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.406969 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.407021 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.407031 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.407051 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.407065 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:20Z","lastTransitionTime":"2026-01-30T00:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.509868 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.509947 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.509972 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.510010 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.510037 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:20Z","lastTransitionTime":"2026-01-30T00:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.613298 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.613370 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.613383 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.613410 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.613423 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:20Z","lastTransitionTime":"2026-01-30T00:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.717306 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.717373 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.717391 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.717420 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.717442 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:20Z","lastTransitionTime":"2026-01-30T00:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.820395 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.820461 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.820479 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.820506 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.820527 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:20Z","lastTransitionTime":"2026-01-30T00:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.924958 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.925114 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.925149 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.925226 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:20 crc kubenswrapper[4885]: I0130 00:10:20.925257 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:20Z","lastTransitionTime":"2026-01-30T00:10:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.029505 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.029605 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.029625 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.029663 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.029688 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:21Z","lastTransitionTime":"2026-01-30T00:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.139319 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.139421 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.139448 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.139484 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.139511 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:21Z","lastTransitionTime":"2026-01-30T00:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.143720 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 11:12:29.375247797 +0000 UTC Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.243976 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.244073 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.244100 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.244133 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.244156 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:21Z","lastTransitionTime":"2026-01-30T00:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.348678 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.348762 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.348832 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.348866 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.348886 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:21Z","lastTransitionTime":"2026-01-30T00:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.452361 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.452598 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.452616 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.452639 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.452651 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:21Z","lastTransitionTime":"2026-01-30T00:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.556148 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.556218 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.556230 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.556253 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.556266 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:21Z","lastTransitionTime":"2026-01-30T00:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.659500 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.659565 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.659577 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.659598 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.659610 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:21Z","lastTransitionTime":"2026-01-30T00:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.763200 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.763266 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.763278 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.763298 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.763401 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:21Z","lastTransitionTime":"2026-01-30T00:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.866459 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.866496 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.866508 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.866525 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.866536 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:21Z","lastTransitionTime":"2026-01-30T00:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.969360 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.969400 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.969409 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.969425 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:21 crc kubenswrapper[4885]: I0130 00:10:21.969434 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:21Z","lastTransitionTime":"2026-01-30T00:10:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.072520 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.072592 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.072611 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.072636 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.072653 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:22Z","lastTransitionTime":"2026-01-30T00:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.122150 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.122216 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.122229 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.122249 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.122262 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:22Z","lastTransitionTime":"2026-01-30T00:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.141015 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.141260 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.141355 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.141748 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:10:22 crc kubenswrapper[4885]: E0130 00:10:22.141972 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:10:22 crc kubenswrapper[4885]: E0130 00:10:22.142163 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:10:22 crc kubenswrapper[4885]: E0130 00:10:22.142257 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:10:22 crc kubenswrapper[4885]: E0130 00:10:22.142404 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:10:22 crc kubenswrapper[4885]: E0130 00:10:22.143116 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:22Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.144160 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 23:59:38.999092005 +0000 UTC Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.148108 4885 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.148142 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.148154 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.148174 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.148189 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:22Z","lastTransitionTime":"2026-01-30T00:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:22 crc kubenswrapper[4885]: E0130 00:10:22.169508 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:22Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.174970 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.175032 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.175044 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.175062 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.175073 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:22Z","lastTransitionTime":"2026-01-30T00:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:22 crc kubenswrapper[4885]: E0130 00:10:22.188833 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:22Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.193210 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.193291 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.193309 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.193333 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.193348 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:22Z","lastTransitionTime":"2026-01-30T00:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:22 crc kubenswrapper[4885]: E0130 00:10:22.209388 4885 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-30T00:10:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"263f6cfa-1c2e-4348-adce-fb51ebda12f3\\\",\\\"systemUUID\\\":\\\"03cb89c2-61ef-45e2-93cd-6b2804f1f0de\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-30T00:10:22Z is after 2025-08-24T17:21:41Z" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.214203 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.214229 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
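Every one of these node-status retries fails the same way: the PATCH is rejected because the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a certificate that expired on 2025-08-24T17:21:41Z, while the node clock reads 2026-01-30. A minimal Go diagnostic sketch for confirming that from the node follows; the address comes from the log lines above, and everything else (file name, handshake settings) is illustrative rather than kubelet code:

```go
// certcheck.go — diagnostic sketch (not kubelet code) for the repeated
// "x509: certificate has expired" failures above: dial the webhook
// endpoint named in the log and print the validity window of the
// certificate the server presents.
package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Address taken from the log's failing Post "https://127.0.0.1:9743/node".
	// InsecureSkipVerify lets us inspect an expired certificate instead of
	// failing the handshake the way the kubelet's client did.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("dial webhook: %v", err)
	}
	defer conn.Close()

	now := time.Now()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("cn=%q notBefore=%s notAfter=%s expired=%v\n",
			cert.Subject.CommonName,
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339),
			now.After(cert.NotAfter))
	}
}
```

Against the state captured in this log, the leaf certificate would be expected to report notAfter=2025-08-24T17:21:41Z and expired=true, matching the "current time ... is after 2025-08-24T17:21:41Z" text in the error entries.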
event="NodeHasNoDiskPressure" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.214254 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.214271 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.214281 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:22Z","lastTransitionTime":"2026-01-30T00:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.237403 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.237439 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.237450 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.237471 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.237485 4885 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-30T00:10:22Z","lastTransitionTime":"2026-01-30T00:10:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.267852 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-m84zn"] Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.268323 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m84zn" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.270269 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.271900 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.271985 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.272691 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.305004 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/114b3662-8452-4317-b46d-8d377a279ff1-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-m84zn\" (UID: \"114b3662-8452-4317-b46d-8d377a279ff1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m84zn" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.305056 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/114b3662-8452-4317-b46d-8d377a279ff1-service-ca\") pod \"cluster-version-operator-5c965bbfc6-m84zn\" (UID: \"114b3662-8452-4317-b46d-8d377a279ff1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m84zn" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.305081 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/114b3662-8452-4317-b46d-8d377a279ff1-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-m84zn\" (UID: \"114b3662-8452-4317-b46d-8d377a279ff1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m84zn" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.305102 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/114b3662-8452-4317-b46d-8d377a279ff1-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-m84zn\" (UID: \"114b3662-8452-4317-b46d-8d377a279ff1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m84zn" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.305182 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/114b3662-8452-4317-b46d-8d377a279ff1-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-m84zn\" (UID: \"114b3662-8452-4317-b46d-8d377a279ff1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m84zn" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.316678 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-4t96d" podStartSLOduration=84.316654732 podStartE2EDuration="1m24.316654732s" podCreationTimestamp="2026-01-30 00:08:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:22.301935315 
+0000 UTC m=+108.893407083" watchObservedRunningTime="2026-01-30 00:10:22.316654732 +0000 UTC m=+108.908126480" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.346990 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-xmv9h" podStartSLOduration=83.346968918 podStartE2EDuration="1m23.346968918s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:22.317081914 +0000 UTC m=+108.908553652" watchObservedRunningTime="2026-01-30 00:10:22.346968918 +0000 UTC m=+108.938440666" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.365924 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podStartSLOduration=83.365900146 podStartE2EDuration="1m23.365900146s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:22.365183247 +0000 UTC m=+108.956655005" watchObservedRunningTime="2026-01-30 00:10:22.365900146 +0000 UTC m=+108.957371894" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.401645 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=88.401610694 podStartE2EDuration="1m28.401610694s" podCreationTimestamp="2026-01-30 00:08:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:22.38282388 +0000 UTC m=+108.974295638" watchObservedRunningTime="2026-01-30 00:10:22.401610694 +0000 UTC m=+108.993082452" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.405923 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/114b3662-8452-4317-b46d-8d377a279ff1-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-m84zn\" (UID: \"114b3662-8452-4317-b46d-8d377a279ff1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m84zn" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.405987 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/114b3662-8452-4317-b46d-8d377a279ff1-service-ca\") pod \"cluster-version-operator-5c965bbfc6-m84zn\" (UID: \"114b3662-8452-4317-b46d-8d377a279ff1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m84zn" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.406008 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/114b3662-8452-4317-b46d-8d377a279ff1-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-m84zn\" (UID: \"114b3662-8452-4317-b46d-8d377a279ff1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m84zn" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.406032 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/114b3662-8452-4317-b46d-8d377a279ff1-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-m84zn\" (UID: \"114b3662-8452-4317-b46d-8d377a279ff1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m84zn" 
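The podStartSLOduration figures in these pod_startup_latency_tracker entries are simply observedRunningTime minus podCreationTimestamp. A quick Go check of that arithmetic, using the openshift-dns/node-resolver-4t96d timestamps copied from the log (the layout string is Go's default time.Time formatting, which is how the kubelet prints these fields; the file name is illustrative):

```go
// slo.go — reproduces a podStartSLOduration value from the entries above.
package main

import (
	"fmt"
	"log"
	"time"
)

func main() {
	// Go's default time.Time print format, as seen in the log fields.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

	// Timestamps copied from the node-resolver-4t96d entry.
	created, err := time.Parse(layout, "2026-01-30 00:08:58 +0000 UTC")
	if err != nil {
		log.Fatal(err)
	}
	running, err := time.Parse(layout, "2026-01-30 00:10:22.316654732 +0000 UTC")
	if err != nil {
		log.Fatal(err)
	}

	// Prints 1m24.316654732s — the logged podStartSLOduration=84.316654732.
	fmt.Println(running.Sub(created))
}
```

The firstStartedPulling/lastFinishedPulling fields are the zero time ("0001-01-01 00:00:00 +0000 UTC") in every entry here because the images were already present on the node, so no pull interval contributes to the startup duration.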
Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.406114 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/114b3662-8452-4317-b46d-8d377a279ff1-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-m84zn\" (UID: \"114b3662-8452-4317-b46d-8d377a279ff1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m84zn" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.406193 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/114b3662-8452-4317-b46d-8d377a279ff1-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-m84zn\" (UID: \"114b3662-8452-4317-b46d-8d377a279ff1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m84zn" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.407457 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/114b3662-8452-4317-b46d-8d377a279ff1-service-ca\") pod \"cluster-version-operator-5c965bbfc6-m84zn\" (UID: \"114b3662-8452-4317-b46d-8d377a279ff1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m84zn" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.407583 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/114b3662-8452-4317-b46d-8d377a279ff1-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-m84zn\" (UID: \"114b3662-8452-4317-b46d-8d377a279ff1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m84zn" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.422673 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/114b3662-8452-4317-b46d-8d377a279ff1-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-m84zn\" (UID: \"114b3662-8452-4317-b46d-8d377a279ff1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m84zn" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.423312 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/114b3662-8452-4317-b46d-8d377a279ff1-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-m84zn\" (UID: \"114b3662-8452-4317-b46d-8d377a279ff1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m84zn" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.446921 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-72hlw" podStartSLOduration=83.446895424 podStartE2EDuration="1m23.446895424s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:22.446307759 +0000 UTC m=+109.037779507" watchObservedRunningTime="2026-01-30 00:10:22.446895424 +0000 UTC m=+109.038367172" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.503796 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=89.503753607 podStartE2EDuration="1m29.503753607s" podCreationTimestamp="2026-01-30 00:08:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:22.487976234 +0000 UTC m=+109.079447982" watchObservedRunningTime="2026-01-30 00:10:22.503753607 +0000 UTC m=+109.095225355" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.519724 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=58.519700477 podStartE2EDuration="58.519700477s" podCreationTimestamp="2026-01-30 00:09:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:22.5046213 +0000 UTC m=+109.096093068" watchObservedRunningTime="2026-01-30 00:10:22.519700477 +0000 UTC m=+109.111172225" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.573554 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=86.573533301 podStartE2EDuration="1m26.573533301s" podCreationTimestamp="2026-01-30 00:08:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:22.572288319 +0000 UTC m=+109.163760067" watchObservedRunningTime="2026-01-30 00:10:22.573533301 +0000 UTC m=+109.165005049" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.574565 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=27.574555677 podStartE2EDuration="27.574555677s" podCreationTimestamp="2026-01-30 00:09:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:22.546864271 +0000 UTC m=+109.138336019" watchObservedRunningTime="2026-01-30 00:10:22.574555677 +0000 UTC m=+109.166027425" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.582707 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m84zn" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.618803 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-zrbl7" podStartSLOduration=84.618758049 podStartE2EDuration="1m24.618758049s" podCreationTimestamp="2026-01-30 00:08:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:22.605394827 +0000 UTC m=+109.196866575" watchObservedRunningTime="2026-01-30 00:10:22.618758049 +0000 UTC m=+109.210229797" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.633858 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pdvgr" podStartSLOduration=83.633832225 podStartE2EDuration="1m23.633832225s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:22.620155396 +0000 UTC m=+109.211627144" watchObservedRunningTime="2026-01-30 00:10:22.633832225 +0000 UTC m=+109.225303973" Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.809582 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m84zn" event={"ID":"114b3662-8452-4317-b46d-8d377a279ff1","Type":"ContainerStarted","Data":"64d70ccfbf673d04de5944d02e491f61ce4292cec5ef039b392b499d65350a7e"} Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.810248 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m84zn" event={"ID":"114b3662-8452-4317-b46d-8d377a279ff1","Type":"ContainerStarted","Data":"3038496055a97fd47bede7c1c0e35ff694a4d8af9d2299a94a5f813b88e0de1b"} Jan 30 00:10:22 crc kubenswrapper[4885]: I0130 00:10:22.836404 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-m84zn" podStartSLOduration=83.836378257 podStartE2EDuration="1m23.836378257s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:22.8350144 +0000 UTC m=+109.426486158" watchObservedRunningTime="2026-01-30 00:10:22.836378257 +0000 UTC m=+109.427850015" Jan 30 00:10:23 crc kubenswrapper[4885]: I0130 00:10:23.145348 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 22:13:19.021854463 +0000 UTC Jan 30 00:10:23 crc kubenswrapper[4885]: I0130 00:10:23.145461 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Jan 30 00:10:23 crc kubenswrapper[4885]: I0130 00:10:23.154356 4885 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 30 00:10:24 crc kubenswrapper[4885]: I0130 00:10:24.141002 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:10:24 crc kubenswrapper[4885]: I0130 00:10:24.141957 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:24 crc kubenswrapper[4885]: I0130 00:10:24.142044 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:10:24 crc kubenswrapper[4885]: I0130 00:10:24.143861 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:10:24 crc kubenswrapper[4885]: E0130 00:10:24.144056 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:10:24 crc kubenswrapper[4885]: E0130 00:10:24.144196 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:10:24 crc kubenswrapper[4885]: E0130 00:10:24.144432 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:10:24 crc kubenswrapper[4885]: E0130 00:10:24.144557 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:10:26 crc kubenswrapper[4885]: I0130 00:10:26.141382 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:26 crc kubenswrapper[4885]: I0130 00:10:26.141450 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:10:26 crc kubenswrapper[4885]: I0130 00:10:26.141389 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:10:26 crc kubenswrapper[4885]: E0130 00:10:26.141569 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:10:26 crc kubenswrapper[4885]: E0130 00:10:26.141675 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:10:26 crc kubenswrapper[4885]: E0130 00:10:26.141872 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:10:26 crc kubenswrapper[4885]: I0130 00:10:26.141411 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:10:26 crc kubenswrapper[4885]: E0130 00:10:26.142679 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:10:28 crc kubenswrapper[4885]: I0130 00:10:28.141472 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:10:28 crc kubenswrapper[4885]: I0130 00:10:28.141535 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:10:28 crc kubenswrapper[4885]: I0130 00:10:28.141570 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:28 crc kubenswrapper[4885]: E0130 00:10:28.141733 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:10:28 crc kubenswrapper[4885]: I0130 00:10:28.141824 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:10:28 crc kubenswrapper[4885]: E0130 00:10:28.141974 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:10:28 crc kubenswrapper[4885]: E0130 00:10:28.142321 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:10:28 crc kubenswrapper[4885]: E0130 00:10:28.142837 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:10:28 crc kubenswrapper[4885]: I0130 00:10:28.145059 4885 scope.go:117] "RemoveContainer" containerID="5780df1fee00b2c2d8653b44046f36214d66f32ce87ebeceea15fc88e8302dc9" Jan 30 00:10:28 crc kubenswrapper[4885]: E0130 00:10:28.145590 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-hwpvs_openshift-ovn-kubernetes(147e5e96-db98-498f-b4a4-927d73cb5db5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" Jan 30 00:10:30 crc kubenswrapper[4885]: I0130 00:10:30.142132 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:10:30 crc kubenswrapper[4885]: E0130 00:10:30.143054 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:10:30 crc kubenswrapper[4885]: I0130 00:10:30.142246 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:10:30 crc kubenswrapper[4885]: E0130 00:10:30.143397 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:10:30 crc kubenswrapper[4885]: I0130 00:10:30.142183 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:10:30 crc kubenswrapper[4885]: E0130 00:10:30.143655 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:10:30 crc kubenswrapper[4885]: I0130 00:10:30.142301 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:30 crc kubenswrapper[4885]: E0130 00:10:30.143943 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:10:32 crc kubenswrapper[4885]: I0130 00:10:32.142722 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:10:32 crc kubenswrapper[4885]: I0130 00:10:32.142844 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:32 crc kubenswrapper[4885]: I0130 00:10:32.142912 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:10:32 crc kubenswrapper[4885]: E0130 00:10:32.142978 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:10:32 crc kubenswrapper[4885]: E0130 00:10:32.143169 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:10:32 crc kubenswrapper[4885]: E0130 00:10:32.143333 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:10:32 crc kubenswrapper[4885]: I0130 00:10:32.143869 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:10:32 crc kubenswrapper[4885]: E0130 00:10:32.144221 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:10:32 crc kubenswrapper[4885]: I0130 00:10:32.846187 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xmv9h_3f11e547-11fd-417a-be4a-e4f37d8e7839/kube-multus/1.log" Jan 30 00:10:32 crc kubenswrapper[4885]: I0130 00:10:32.846936 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xmv9h_3f11e547-11fd-417a-be4a-e4f37d8e7839/kube-multus/0.log" Jan 30 00:10:32 crc kubenswrapper[4885]: I0130 00:10:32.847010 4885 generic.go:334] "Generic (PLEG): container finished" podID="3f11e547-11fd-417a-be4a-e4f37d8e7839" containerID="d609ef0f63a56e8d8c04c39f0d20715f8c0e42915ccf9a7fdfd1bec2a35dadc9" exitCode=1 Jan 30 00:10:32 crc kubenswrapper[4885]: I0130 00:10:32.847137 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xmv9h" event={"ID":"3f11e547-11fd-417a-be4a-e4f37d8e7839","Type":"ContainerDied","Data":"d609ef0f63a56e8d8c04c39f0d20715f8c0e42915ccf9a7fdfd1bec2a35dadc9"} Jan 30 00:10:32 crc kubenswrapper[4885]: I0130 00:10:32.847204 4885 scope.go:117] "RemoveContainer" containerID="f703800add6b3e9db685b3d0275f26173d4e853dcd974c613e4fb09b22e83758" Jan 30 00:10:32 crc kubenswrapper[4885]: I0130 00:10:32.849074 4885 scope.go:117] "RemoveContainer" containerID="d609ef0f63a56e8d8c04c39f0d20715f8c0e42915ccf9a7fdfd1bec2a35dadc9" Jan 30 00:10:32 crc kubenswrapper[4885]: E0130 00:10:32.849534 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-xmv9h_openshift-multus(3f11e547-11fd-417a-be4a-e4f37d8e7839)\"" pod="openshift-multus/multus-xmv9h" podUID="3f11e547-11fd-417a-be4a-e4f37d8e7839" Jan 30 00:10:33 crc kubenswrapper[4885]: I0130 00:10:33.853610 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xmv9h_3f11e547-11fd-417a-be4a-e4f37d8e7839/kube-multus/1.log" Jan 30 00:10:34 crc kubenswrapper[4885]: E0130 00:10:34.086039 4885 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 30 00:10:34 crc kubenswrapper[4885]: I0130 00:10:34.141350 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:10:34 crc kubenswrapper[4885]: I0130 00:10:34.141398 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:10:34 crc kubenswrapper[4885]: I0130 00:10:34.141500 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:10:34 crc kubenswrapper[4885]: I0130 00:10:34.142344 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:34 crc kubenswrapper[4885]: E0130 00:10:34.144691 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:10:34 crc kubenswrapper[4885]: E0130 00:10:34.144936 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:10:34 crc kubenswrapper[4885]: E0130 00:10:34.145027 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:10:34 crc kubenswrapper[4885]: E0130 00:10:34.145451 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:10:34 crc kubenswrapper[4885]: E0130 00:10:34.252054 4885 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 30 00:10:36 crc kubenswrapper[4885]: I0130 00:10:36.141930 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:36 crc kubenswrapper[4885]: I0130 00:10:36.141930 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:10:36 crc kubenswrapper[4885]: E0130 00:10:36.142807 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:10:36 crc kubenswrapper[4885]: I0130 00:10:36.142073 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:10:36 crc kubenswrapper[4885]: E0130 00:10:36.142932 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:10:36 crc kubenswrapper[4885]: E0130 00:10:36.143211 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:10:36 crc kubenswrapper[4885]: I0130 00:10:36.142053 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:10:36 crc kubenswrapper[4885]: E0130 00:10:36.143754 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:10:38 crc kubenswrapper[4885]: I0130 00:10:38.141368 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:10:38 crc kubenswrapper[4885]: I0130 00:10:38.141538 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:10:38 crc kubenswrapper[4885]: E0130 00:10:38.141628 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:10:38 crc kubenswrapper[4885]: E0130 00:10:38.141741 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:10:38 crc kubenswrapper[4885]: I0130 00:10:38.141869 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:38 crc kubenswrapper[4885]: E0130 00:10:38.142074 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:10:38 crc kubenswrapper[4885]: I0130 00:10:38.142177 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:10:38 crc kubenswrapper[4885]: E0130 00:10:38.142314 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:10:39 crc kubenswrapper[4885]: E0130 00:10:39.253621 4885 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 30 00:10:40 crc kubenswrapper[4885]: I0130 00:10:40.141634 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:10:40 crc kubenswrapper[4885]: I0130 00:10:40.141687 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:10:40 crc kubenswrapper[4885]: I0130 00:10:40.141727 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:10:40 crc kubenswrapper[4885]: E0130 00:10:40.141946 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:10:40 crc kubenswrapper[4885]: E0130 00:10:40.142126 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:10:40 crc kubenswrapper[4885]: E0130 00:10:40.142265 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:10:40 crc kubenswrapper[4885]: I0130 00:10:40.142546 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:40 crc kubenswrapper[4885]: E0130 00:10:40.142858 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:10:42 crc kubenswrapper[4885]: I0130 00:10:42.141565 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:42 crc kubenswrapper[4885]: I0130 00:10:42.141658 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:10:42 crc kubenswrapper[4885]: I0130 00:10:42.141667 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:10:42 crc kubenswrapper[4885]: E0130 00:10:42.141757 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:10:42 crc kubenswrapper[4885]: I0130 00:10:42.141837 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:10:42 crc kubenswrapper[4885]: E0130 00:10:42.142026 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:10:42 crc kubenswrapper[4885]: E0130 00:10:42.142193 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:10:42 crc kubenswrapper[4885]: E0130 00:10:42.142372 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:10:43 crc kubenswrapper[4885]: I0130 00:10:43.141840 4885 scope.go:117] "RemoveContainer" containerID="5780df1fee00b2c2d8653b44046f36214d66f32ce87ebeceea15fc88e8302dc9" Jan 30 00:10:43 crc kubenswrapper[4885]: I0130 00:10:43.891843 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hwpvs_147e5e96-db98-498f-b4a4-927d73cb5db5/ovnkube-controller/3.log" Jan 30 00:10:43 crc kubenswrapper[4885]: I0130 00:10:43.893321 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerStarted","Data":"4c863c3754e027f805460ff2c446494df4b4ed59f4d80a94d2dd7d2a276fdeda"} Jan 30 00:10:43 crc kubenswrapper[4885]: I0130 00:10:43.893842 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:10:44 crc kubenswrapper[4885]: I0130 00:10:44.081330 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" podStartSLOduration=105.081303868 podStartE2EDuration="1m45.081303868s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:43.918353456 +0000 UTC m=+130.509825274" watchObservedRunningTime="2026-01-30 00:10:44.081303868 +0000 UTC m=+130.672775626" Jan 30 00:10:44 crc kubenswrapper[4885]: I0130 00:10:44.082333 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-hg2nk"] Jan 30 00:10:44 crc kubenswrapper[4885]: I0130 00:10:44.082462 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:44 crc kubenswrapper[4885]: E0130 00:10:44.082573 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:10:44 crc kubenswrapper[4885]: I0130 00:10:44.140932 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:10:44 crc kubenswrapper[4885]: I0130 00:10:44.140996 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:10:44 crc kubenswrapper[4885]: E0130 00:10:44.142191 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:10:44 crc kubenswrapper[4885]: I0130 00:10:44.142251 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:10:44 crc kubenswrapper[4885]: E0130 00:10:44.143135 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:10:44 crc kubenswrapper[4885]: E0130 00:10:44.143232 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:10:44 crc kubenswrapper[4885]: E0130 00:10:44.254151 4885 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 30 00:10:45 crc kubenswrapper[4885]: I0130 00:10:45.143220 4885 scope.go:117] "RemoveContainer" containerID="d609ef0f63a56e8d8c04c39f0d20715f8c0e42915ccf9a7fdfd1bec2a35dadc9" Jan 30 00:10:45 crc kubenswrapper[4885]: I0130 00:10:45.902872 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xmv9h_3f11e547-11fd-417a-be4a-e4f37d8e7839/kube-multus/1.log" Jan 30 00:10:45 crc kubenswrapper[4885]: I0130 00:10:45.902969 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xmv9h" event={"ID":"3f11e547-11fd-417a-be4a-e4f37d8e7839","Type":"ContainerStarted","Data":"2681aea94aa236ce8fbf8e060c1ff8dd558f4a63c3b6a0382c7b9f70ffa15280"} Jan 30 00:10:46 crc kubenswrapper[4885]: I0130 00:10:46.141554 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:10:46 crc kubenswrapper[4885]: I0130 00:10:46.141804 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:10:46 crc kubenswrapper[4885]: E0130 00:10:46.142011 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:10:46 crc kubenswrapper[4885]: I0130 00:10:46.141556 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:46 crc kubenswrapper[4885]: E0130 00:10:46.142470 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:10:46 crc kubenswrapper[4885]: E0130 00:10:46.142373 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:10:46 crc kubenswrapper[4885]: I0130 00:10:46.142815 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:10:46 crc kubenswrapper[4885]: E0130 00:10:46.142957 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:10:48 crc kubenswrapper[4885]: I0130 00:10:48.141230 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:10:48 crc kubenswrapper[4885]: I0130 00:10:48.141235 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:48 crc kubenswrapper[4885]: E0130 00:10:48.141405 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 30 00:10:48 crc kubenswrapper[4885]: I0130 00:10:48.141495 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:10:48 crc kubenswrapper[4885]: E0130 00:10:48.141633 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-hg2nk" podUID="313f7566-bae9-4b9c-8c30-9e3c7aef8364" Jan 30 00:10:48 crc kubenswrapper[4885]: I0130 00:10:48.141938 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:10:48 crc kubenswrapper[4885]: E0130 00:10:48.141942 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 30 00:10:48 crc kubenswrapper[4885]: E0130 00:10:48.142067 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 30 00:10:50 crc kubenswrapper[4885]: I0130 00:10:50.141877 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 30 00:10:50 crc kubenswrapper[4885]: I0130 00:10:50.141988 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:10:50 crc kubenswrapper[4885]: I0130 00:10:50.142214 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:10:50 crc kubenswrapper[4885]: I0130 00:10:50.142320 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 30 00:10:50 crc kubenswrapper[4885]: I0130 00:10:50.145891 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 30 00:10:50 crc kubenswrapper[4885]: I0130 00:10:50.146523 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 30 00:10:50 crc kubenswrapper[4885]: I0130 00:10:50.146550 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 30 00:10:50 crc kubenswrapper[4885]: I0130 00:10:50.146919 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 30 00:10:50 crc kubenswrapper[4885]: I0130 00:10:50.147058 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 30 00:10:50 crc kubenswrapper[4885]: I0130 00:10:50.147072 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.706388 4885 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.769436 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-xw5nc"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.770444 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2f8ww"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.770721 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.770850 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.771483 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.771581 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.772048 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.772132 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.780173 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-pruner-29495520-c9vgk"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.780749 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-7hvjx"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.781159 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-7hvjx" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.781441 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-cd22w"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.781666 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29495520-c9vgk" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.782525 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cd22w" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.785109 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-d4zrj"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.785566 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8skch"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.786099 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8skch" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.786898 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-d4zrj" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.788179 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.788505 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.788596 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.788734 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.788744 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.788879 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.789040 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"serviceca" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.789274 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.789331 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.790817 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.791413 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.791698 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.791966 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.805133 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.805761 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.806138 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.806463 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.806676 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.806850 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 30 00:10:52 crc 
kubenswrapper[4885]: I0130 00:10:52.807022 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.797774 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-qtxcm"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.808591 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24xxs"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.809136 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b5gjz"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.809494 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-p87kw"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.810024 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.810519 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-qtxcm" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.810821 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24xxs" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.811086 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b5gjz" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.812166 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.822823 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.823171 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.823401 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.823665 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.823862 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.824034 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.824050 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.824239 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.824590 4885 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.825453 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.826219 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.827803 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.828739 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.829343 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.830105 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.830636 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.859297 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.859491 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.859650 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.859751 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.862121 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-2bgb9"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.862505 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.862624 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.863109 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-nm4dq"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.863132 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.863126 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.863445 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.863591 4885 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.863850 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.863970 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-nm4dq" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.864093 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.864411 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.864639 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.864670 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.864861 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.865052 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.863353 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.865872 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"pruner-dockercfg-p7bcw" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.866588 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.871016 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.871265 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-r9hww"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.872125 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-r9hww" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.874082 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.881159 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-4bhjw"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.881480 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.882428 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.882812 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.882927 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.882966 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.883621 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.883843 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.883926 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.883969 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.884114 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.884201 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.884330 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.884415 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.884496 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.884630 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.884741 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 30 00:10:52 crc 
kubenswrapper[4885]: I0130 00:10:52.884906 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.885072 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.885166 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.885227 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.885312 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.884458 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.885446 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.885114 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.884637 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.885163 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.885131 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.883868 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.885849 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.886025 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.886415 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.886556 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.886793 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.891303 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.891410 4885 reflector.go:368] Caches populated for *v1.ConfigMap from 
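
The reflector.go:368 entries above come from client-go's reflector: each one records that the initial LIST for a watched Secret or ConfigMap has landed in the kubelet's local cache, so volume mounting can read the object without hitting the API server. A minimal sketch of the same populate-then-wait pattern, assuming an illustrative kubeconfig path (the kubelet itself uses its node credentials, not a file like this):

package main

import (
    "fmt"

    "k8s.io/client-go/informers"
    "k8s.io/client-go/kubernetes"
    "k8s.io/client-go/tools/cache"
    "k8s.io/client-go/tools/clientcmd"
)

func main() {
    // Assumed kubeconfig path, purely for illustration.
    cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
    if err != nil {
        panic(err)
    }
    client := kubernetes.NewForConfigOrDie(cfg)

    // Watch-driven shared cache, no periodic resync.
    factory := informers.NewSharedInformerFactory(client, 0)
    cmInformer := factory.Core().V1().ConfigMaps().Informer()

    stop := make(chan struct{})
    defer close(stop)
    factory.Start(stop)

    // Returns once the initial LIST has been stored locally, which is
    // the point at which the reflector logs "Caches populated".
    if !cache.WaitForCacheSync(stop, cmInformer.HasSynced) {
        panic("cache never synced")
    }
    fmt.Println("ConfigMap cache populated;", len(cmInformer.GetStore().List()), "objects")
}
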
object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.895309 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.895746 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.898282 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.902882 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8vr8f"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.903576 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-4xtnb"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.925917 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-fvdhv"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926005 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8f8ad951-1f10-4883-8132-7afa6c3df767-audit-dir\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926057 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsj8w\" (UniqueName: \"kubernetes.io/projected/8f8ad951-1f10-4883-8132-7afa6c3df767-kube-api-access-wsj8w\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926083 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/19609ed6-3922-4b41-b02d-abf4fd2922a6-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-24xxs\" (UID: \"19609ed6-3922-4b41-b02d-abf4fd2922a6\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24xxs" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926126 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93771e95-54e2-4f25-86b1-c1f6b0f18a8f-config\") pod \"machine-approver-56656f9798-cd22w\" (UID: \"93771e95-54e2-4f25-86b1-c1f6b0f18a8f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cd22w" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926157 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-client-ca\") pod \"controller-manager-879f6c89f-2f8ww\" (UID: \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926227 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" 
(UniqueName: \"kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926255 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7wgfv\" (UniqueName: \"kubernetes.io/projected/0eb0e632-fc50-4845-aa1b-4aab2bb7826b-kube-api-access-7wgfv\") pod \"downloads-7954f5f757-qtxcm\" (UID: \"0eb0e632-fc50-4845-aa1b-4aab2bb7826b\") " pod="openshift-console/downloads-7954f5f757-qtxcm" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926278 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/fcde4e44-9ff6-4539-84f3-a016080e13ce-encryption-config\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926299 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8f8ad951-1f10-4883-8132-7afa6c3df767-etcd-client\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926324 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bsz56\" (UniqueName: \"kubernetes.io/projected/c59baa8a-ba27-4ef6-9d63-a0a25b597f7e-kube-api-access-bsz56\") pod \"image-pruner-29495520-c9vgk\" (UID: \"c59baa8a-ba27-4ef6-9d63-a0a25b597f7e\") " pod="openshift-image-registry/image-pruner-29495520-c9vgk" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926348 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fcde4e44-9ff6-4539-84f3-a016080e13ce-serving-cert\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926368 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/8f8ad951-1f10-4883-8132-7afa6c3df767-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926386 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwfns\" (UniqueName: \"kubernetes.io/projected/bb88aa5c-bc88-4447-b233-ad65df878fcd-kube-api-access-vwfns\") pod \"authentication-operator-69f744f599-7hvjx\" (UID: \"bb88aa5c-bc88-4447-b233-ad65df878fcd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7hvjx" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926413 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/93771e95-54e2-4f25-86b1-c1f6b0f18a8f-auth-proxy-config\") pod 
\"machine-approver-56656f9798-cd22w\" (UID: \"93771e95-54e2-4f25-86b1-c1f6b0f18a8f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cd22w" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926437 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/93771e95-54e2-4f25-86b1-c1f6b0f18a8f-machine-approver-tls\") pod \"machine-approver-56656f9798-cd22w\" (UID: \"93771e95-54e2-4f25-86b1-c1f6b0f18a8f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cd22w" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926608 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926636 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jsdbc\" (UniqueName: \"kubernetes.io/projected/5af6d643-70f6-435e-b323-fac9aa37b466-kube-api-access-jsdbc\") pod \"openshift-config-operator-7777fb866f-8skch\" (UID: \"5af6d643-70f6-435e-b323-fac9aa37b466\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8skch" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926655 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fcde4e44-9ff6-4539-84f3-a016080e13ce-trusted-ca-bundle\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926677 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926745 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926769 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926816 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/51bce3b6-6a4d-45ea-89a7-bf5cf50d7610-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-d4zrj\" (UID: \"51bce3b6-6a4d-45ea-89a7-bf5cf50d7610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d4zrj" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926865 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/fcde4e44-9ff6-4539-84f3-a016080e13ce-etcd-serving-ca\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926919 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8f8ad951-1f10-4883-8132-7afa6c3df767-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926944 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27k44\" (UniqueName: \"kubernetes.io/projected/4a58c498-e399-40e4-a271-d42efd6c6745-kube-api-access-27k44\") pod \"openshift-controller-manager-operator-756b6f6bc6-b5gjz\" (UID: \"4a58c498-e399-40e4-a271-d42efd6c6745\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b5gjz" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926982 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/929bbe65-1902-453c-bebf-4e833b325ab1-config\") pod \"route-controller-manager-6576b87f9c-mhgj8\" (UID: \"929bbe65-1902-453c-bebf-4e833b325ab1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.927016 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a58c498-e399-40e4-a271-d42efd6c6745-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-b5gjz\" (UID: \"4a58c498-e399-40e4-a271-d42efd6c6745\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b5gjz" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.927041 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/929bbe65-1902-453c-bebf-4e833b325ab1-serving-cert\") pod \"route-controller-manager-6576b87f9c-mhgj8\" (UID: \"929bbe65-1902-453c-bebf-4e833b325ab1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.927069 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.927100 4885 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-2f8ww\" (UID: \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.927132 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbfdw\" (UniqueName: \"kubernetes.io/projected/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-kube-api-access-hbfdw\") pod \"controller-manager-879f6c89f-2f8ww\" (UID: \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.927275 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/fcde4e44-9ff6-4539-84f3-a016080e13ce-image-import-ca\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.927327 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8f8ad951-1f10-4883-8132-7afa6c3df767-audit-policies\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.927370 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mdf5\" (UniqueName: \"kubernetes.io/projected/51bce3b6-6a4d-45ea-89a7-bf5cf50d7610-kube-api-access-7mdf5\") pod \"machine-api-operator-5694c8668f-d4zrj\" (UID: \"51bce3b6-6a4d-45ea-89a7-bf5cf50d7610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d4zrj" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.927391 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bb88aa5c-bc88-4447-b233-ad65df878fcd-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-7hvjx\" (UID: \"bb88aa5c-bc88-4447-b233-ad65df878fcd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7hvjx" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.927409 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/8f8ad951-1f10-4883-8132-7afa6c3df767-encryption-config\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.927432 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-fvdhv" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.927451 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fcde4e44-9ff6-4539-84f3-a016080e13ce-etcd-client\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.927503 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-serving-cert\") pod \"controller-manager-879f6c89f-2f8ww\" (UID: \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.927537 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bb88aa5c-bc88-4447-b233-ad65df878fcd-service-ca-bundle\") pod \"authentication-operator-69f744f599-7hvjx\" (UID: \"bb88aa5c-bc88-4447-b233-ad65df878fcd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7hvjx" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.927542 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-8vr8f" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.928660 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.928760 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4xtnb" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.927564 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-audit-policies\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.926747 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6fm5q"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.930878 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.930924 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-audit-dir\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.930947 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.942698 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-dnhsm"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.943452 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mfxlj"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.943920 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-jjb74"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.944038 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6fm5q" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.944458 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.944708 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-jjb74" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.943467 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9svwh\" (UniqueName: \"kubernetes.io/projected/93771e95-54e2-4f25-86b1-c1f6b0f18a8f-kube-api-access-9svwh\") pod \"machine-approver-56656f9798-cd22w\" (UID: \"93771e95-54e2-4f25-86b1-c1f6b0f18a8f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cd22w" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.945240 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.944469 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vjhs9"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.945642 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a58c498-e399-40e4-a271-d42efd6c6745-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-b5gjz\" (UID: \"4a58c498-e399-40e4-a271-d42efd6c6745\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b5gjz" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.945685 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c59baa8a-ba27-4ef6-9d63-a0a25b597f7e-serviceca\") pod \"image-pruner-29495520-c9vgk\" (UID: \"c59baa8a-ba27-4ef6-9d63-a0a25b597f7e\") " pod="openshift-image-registry/image-pruner-29495520-c9vgk" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.945993 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fcde4e44-9ff6-4539-84f3-a016080e13ce-node-pullsecrets\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.946037 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/5af6d643-70f6-435e-b323-fac9aa37b466-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8skch\" (UID: \"5af6d643-70f6-435e-b323-fac9aa37b466\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8skch" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.946061 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.946095 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f8ad951-1f10-4883-8132-7afa6c3df767-serving-cert\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.946159 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/51bce3b6-6a4d-45ea-89a7-bf5cf50d7610-images\") pod \"machine-api-operator-5694c8668f-d4zrj\" (UID: \"51bce3b6-6a4d-45ea-89a7-bf5cf50d7610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d4zrj" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.946184 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5af6d643-70f6-435e-b323-fac9aa37b466-serving-cert\") pod \"openshift-config-operator-7777fb866f-8skch\" (UID: \"5af6d643-70f6-435e-b323-fac9aa37b466\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8skch" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.946212 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7mw2\" (UniqueName: \"kubernetes.io/projected/929bbe65-1902-453c-bebf-4e833b325ab1-kube-api-access-j7mw2\") pod \"route-controller-manager-6576b87f9c-mhgj8\" (UID: \"929bbe65-1902-453c-bebf-4e833b325ab1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.946231 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fcde4e44-9ff6-4539-84f3-a016080e13ce-audit-dir\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.946273 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.946322 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2t74\" (UniqueName: \"kubernetes.io/projected/19609ed6-3922-4b41-b02d-abf4fd2922a6-kube-api-access-r2t74\") pod \"cluster-samples-operator-665b6dd947-24xxs\" (UID: \"19609ed6-3922-4b41-b02d-abf4fd2922a6\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24xxs" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.946349 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb88aa5c-bc88-4447-b233-ad65df878fcd-serving-cert\") pod \"authentication-operator-69f744f599-7hvjx\" (UID: \"bb88aa5c-bc88-4447-b233-ad65df878fcd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7hvjx" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.946374 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/fcde4e44-9ff6-4539-84f3-a016080e13ce-audit\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " 
pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.946429 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tclz\" (UniqueName: \"kubernetes.io/projected/fcde4e44-9ff6-4539-84f3-a016080e13ce-kube-api-access-8tclz\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.946477 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51bce3b6-6a4d-45ea-89a7-bf5cf50d7610-config\") pod \"machine-api-operator-5694c8668f-d4zrj\" (UID: \"51bce3b6-6a4d-45ea-89a7-bf5cf50d7610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d4zrj" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.946508 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcde4e44-9ff6-4539-84f3-a016080e13ce-config\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.946533 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6l7xv\" (UniqueName: \"kubernetes.io/projected/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-kube-api-access-6l7xv\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.946539 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.946651 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/929bbe65-1902-453c-bebf-4e833b325ab1-client-ca\") pod \"route-controller-manager-6576b87f9c-mhgj8\" (UID: \"929bbe65-1902-453c-bebf-4e833b325ab1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.946713 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-config\") pod \"controller-manager-879f6c89f-2f8ww\" (UID: \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.946740 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb88aa5c-bc88-4447-b233-ad65df878fcd-config\") pod \"authentication-operator-69f744f599-7hvjx\" (UID: \"bb88aa5c-bc88-4447-b233-ad65df878fcd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7hvjx" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.946833 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.947025 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9wwjr"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.947489 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8r5gv"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.947510 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.947871 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vjhs9" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.948027 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9wwjr" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.948808 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-9dggz"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.948995 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8r5gv" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.949177 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.949375 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8knvk"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.949751 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-67ddr"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.949911 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9dggz" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.950114 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qw447"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.950204 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-67ddr" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.950960 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ngf2g"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.951395 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ngf2g" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.950207 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8knvk" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.952102 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.952739 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qw447" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.952804 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.955814 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-kbjfj"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.956392 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.956755 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-fkgqw"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.957211 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-fkgqw" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.957421 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kbjfj" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.957580 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.961498 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-hcx7w"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.962421 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-m229k"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.963071 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29495520-k6gh6"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.963749 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29495520-k6gh6" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.964691 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-hcx7w" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.964956 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m229k" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.968759 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-clmbp"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.969092 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-f5g22"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.969498 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-xw5nc"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.969550 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-f5g22" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.969960 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-clmbp" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.973022 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-d4zrj"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.973042 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-qtxcm"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.973052 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-7hvjx"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.976994 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2f8ww"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.977012 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-pruner-29495520-c9vgk"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.977024 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.979575 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-4xtnb"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.980051 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.980247 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.980449 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.980614 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.980643 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.980678 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.982436 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-console/console-f9d7485db-2bgb9"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.982459 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6fm5q"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.985027 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8skch"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.986521 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.990076 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-9dggz"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.991506 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vjhs9"] Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.993214 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 30 00:10:52 crc kubenswrapper[4885]: I0130 00:10:52.999028 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qw447"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.003909 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-92xfx"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.007878 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-p87kw"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.008709 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-92xfx" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.015099 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-tmtj2"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.015412 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.017107 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.018400 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-dnhsm"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.018710 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8vr8f"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.020907 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ngf2g"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.020972 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8r5gv"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.022133 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b5gjz"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.023368 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-67ddr"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.024395 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mfxlj"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.025516 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-4bhjw"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.026669 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-nm4dq"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.028171 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-r9hww"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.029104 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24xxs"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.030161 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9wwjr"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.032016 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-f5g22"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.033286 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.033473 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-jjb74"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.035861 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.037441 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.038572 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-clmbp"] Jan 30 00:10:53 crc kubenswrapper[4885]: 
I0130 00:10:53.040043 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-kbjfj"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.041329 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8knvk"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.042804 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-hcx7w"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.043991 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29495520-k6gh6"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.045075 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-m229k"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.046234 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-tmtj2"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.047297 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-6q2hj"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.047624 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-serving-cert\") pod \"controller-manager-879f6c89f-2f8ww\" (UID: \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.047753 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bb88aa5c-bc88-4447-b233-ad65df878fcd-service-ca-bundle\") pod \"authentication-operator-69f744f599-7hvjx\" (UID: \"bb88aa5c-bc88-4447-b233-ad65df878fcd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7hvjx" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.047869 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-audit-policies\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.047950 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.048028 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/063f4cfd-666f-4493-8678-df74e0347ba3-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-r9hww\" (UID: \"063f4cfd-666f-4493-8678-df74e0347ba3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-r9hww" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.048148 4885 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9svwh\" (UniqueName: \"kubernetes.io/projected/93771e95-54e2-4f25-86b1-c1f6b0f18a8f-kube-api-access-9svwh\") pod \"machine-approver-56656f9798-cd22w\" (UID: \"93771e95-54e2-4f25-86b1-c1f6b0f18a8f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cd22w" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.048225 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-audit-dir\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.048298 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.048374 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a58c498-e399-40e4-a271-d42efd6c6745-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-b5gjz\" (UID: \"4a58c498-e399-40e4-a271-d42efd6c6745\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b5gjz" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.048446 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c59baa8a-ba27-4ef6-9d63-a0a25b597f7e-serviceca\") pod \"image-pruner-29495520-c9vgk\" (UID: \"c59baa8a-ba27-4ef6-9d63-a0a25b597f7e\") " pod="openshift-image-registry/image-pruner-29495520-c9vgk" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.048538 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fcde4e44-9ff6-4539-84f3-a016080e13ce-node-pullsecrets\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.048621 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/5af6d643-70f6-435e-b323-fac9aa37b466-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8skch\" (UID: \"5af6d643-70f6-435e-b323-fac9aa37b466\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8skch" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.048695 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.048776 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/8f8ad951-1f10-4883-8132-7afa6c3df767-serving-cert\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.048903 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/51bce3b6-6a4d-45ea-89a7-bf5cf50d7610-images\") pod \"machine-api-operator-5694c8668f-d4zrj\" (UID: \"51bce3b6-6a4d-45ea-89a7-bf5cf50d7610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d4zrj" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.048978 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-db4tl\" (UniqueName: \"kubernetes.io/projected/a6dfcb67-43fe-46d9-9349-c581afa2d82f-kube-api-access-db4tl\") pod \"control-plane-machine-set-operator-78cbb6b69f-6fm5q\" (UID: \"a6dfcb67-43fe-46d9-9349-c581afa2d82f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6fm5q" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.049057 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5af6d643-70f6-435e-b323-fac9aa37b466-serving-cert\") pod \"openshift-config-operator-7777fb866f-8skch\" (UID: \"5af6d643-70f6-435e-b323-fac9aa37b466\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8skch" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.049137 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7mw2\" (UniqueName: \"kubernetes.io/projected/929bbe65-1902-453c-bebf-4e833b325ab1-kube-api-access-j7mw2\") pod \"route-controller-manager-6576b87f9c-mhgj8\" (UID: \"929bbe65-1902-453c-bebf-4e833b325ab1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.049215 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fcde4e44-9ff6-4539-84f3-a016080e13ce-audit-dir\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.049271 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.049296 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.049422 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bb88aa5c-bc88-4447-b233-ad65df878fcd-service-ca-bundle\") pod \"authentication-operator-69f744f599-7hvjx\" (UID: 
\"bb88aa5c-bc88-4447-b233-ad65df878fcd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7hvjx" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.049442 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/12453952-578c-4ef3-97bd-eee389ec3d91-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-9wwjr\" (UID: \"12453952-578c-4ef3-97bd-eee389ec3d91\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9wwjr" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.049677 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2t74\" (UniqueName: \"kubernetes.io/projected/19609ed6-3922-4b41-b02d-abf4fd2922a6-kube-api-access-r2t74\") pod \"cluster-samples-operator-665b6dd947-24xxs\" (UID: \"19609ed6-3922-4b41-b02d-abf4fd2922a6\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24xxs" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.049770 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb88aa5c-bc88-4447-b233-ad65df878fcd-serving-cert\") pod \"authentication-operator-69f744f599-7hvjx\" (UID: \"bb88aa5c-bc88-4447-b233-ad65df878fcd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7hvjx" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.049875 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v86cx\" (UniqueName: \"kubernetes.io/projected/d806b98c-1d4e-42b2-9da3-4afc4ca6e255-kube-api-access-v86cx\") pod \"multus-admission-controller-857f4d67dd-hcx7w\" (UID: \"d806b98c-1d4e-42b2-9da3-4afc4ca6e255\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hcx7w" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.049972 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzvnk\" (UniqueName: \"kubernetes.io/projected/9d8eb84b-e3c3-46ab-b9bb-29f63d57284e-kube-api-access-lzvnk\") pod \"package-server-manager-789f6589d5-vjhs9\" (UID: \"9d8eb84b-e3c3-46ab-b9bb-29f63d57284e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vjhs9" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.050064 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/fcde4e44-9ff6-4539-84f3-a016080e13ce-audit\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.050157 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tclz\" (UniqueName: \"kubernetes.io/projected/fcde4e44-9ff6-4539-84f3-a016080e13ce-kube-api-access-8tclz\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.050227 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fcde4e44-9ff6-4539-84f3-a016080e13ce-node-pullsecrets\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " 
pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.048492 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-6q2hj"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.050325 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-fkgqw"] Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.050330 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/51bce3b6-6a4d-45ea-89a7-bf5cf50d7610-images\") pod \"machine-api-operator-5694c8668f-d4zrj\" (UID: \"51bce3b6-6a4d-45ea-89a7-bf5cf50d7610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d4zrj" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.049554 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c59baa8a-ba27-4ef6-9d63-a0a25b597f7e-serviceca\") pod \"image-pruner-29495520-c9vgk\" (UID: \"c59baa8a-ba27-4ef6-9d63-a0a25b597f7e\") " pod="openshift-image-registry/image-pruner-29495520-c9vgk" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.048519 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-audit-dir\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.050358 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fcde4e44-9ff6-4539-84f3-a016080e13ce-audit-dir\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.049889 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a58c498-e399-40e4-a271-d42efd6c6745-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-b5gjz\" (UID: \"4a58c498-e399-40e4-a271-d42efd6c6745\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b5gjz" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.050173 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/5af6d643-70f6-435e-b323-fac9aa37b466-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8skch\" (UID: \"5af6d643-70f6-435e-b323-fac9aa37b466\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8skch" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.048580 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-6q2hj" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.050608 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51bce3b6-6a4d-45ea-89a7-bf5cf50d7610-config\") pod \"machine-api-operator-5694c8668f-d4zrj\" (UID: \"51bce3b6-6a4d-45ea-89a7-bf5cf50d7610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d4zrj" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.050696 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcde4e44-9ff6-4539-84f3-a016080e13ce-config\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.050789 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6l7xv\" (UniqueName: \"kubernetes.io/projected/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-kube-api-access-6l7xv\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.050908 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/929bbe65-1902-453c-bebf-4e833b325ab1-client-ca\") pod \"route-controller-manager-6576b87f9c-mhgj8\" (UID: \"929bbe65-1902-453c-bebf-4e833b325ab1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.050997 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d806b98c-1d4e-42b2-9da3-4afc4ca6e255-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-hcx7w\" (UID: \"d806b98c-1d4e-42b2-9da3-4afc4ca6e255\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hcx7w" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.051087 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/28808760-39b2-4b17-82b4-d3b6783ed31c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-m229k\" (UID: \"28808760-39b2-4b17-82b4-d3b6783ed31c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m229k" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.051163 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-config\") pod \"controller-manager-879f6c89f-2f8ww\" (UID: \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.051236 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb88aa5c-bc88-4447-b233-ad65df878fcd-config\") pod \"authentication-operator-69f744f599-7hvjx\" (UID: \"bb88aa5c-bc88-4447-b233-ad65df878fcd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7hvjx" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.051317 4885 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.051435 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b60a4e04-0955-4b9a-9165-92ee6a82b1a6-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8knvk\" (UID: \"b60a4e04-0955-4b9a-9165-92ee6a82b1a6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8knvk" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.051513 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/a6dfcb67-43fe-46d9-9349-c581afa2d82f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-6fm5q\" (UID: \"a6dfcb67-43fe-46d9-9349-c581afa2d82f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6fm5q" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.051532 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/fcde4e44-9ff6-4539-84f3-a016080e13ce-audit\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.051648 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8f8ad951-1f10-4883-8132-7afa6c3df767-audit-dir\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.051681 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsj8w\" (UniqueName: \"kubernetes.io/projected/8f8ad951-1f10-4883-8132-7afa6c3df767-kube-api-access-wsj8w\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.051712 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fcde4e44-9ff6-4539-84f3-a016080e13ce-config\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.051714 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/063f4cfd-666f-4493-8678-df74e0347ba3-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-r9hww\" (UID: \"063f4cfd-666f-4493-8678-df74e0347ba3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-r9hww" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.051810 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-client-ca\") pod \"controller-manager-879f6c89f-2f8ww\" (UID: \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.051841 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/19609ed6-3922-4b41-b02d-abf4fd2922a6-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-24xxs\" (UID: \"19609ed6-3922-4b41-b02d-abf4fd2922a6\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24xxs" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.051864 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93771e95-54e2-4f25-86b1-c1f6b0f18a8f-config\") pod \"machine-approver-56656f9798-cd22w\" (UID: \"93771e95-54e2-4f25-86b1-c1f6b0f18a8f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cd22w" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.051888 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9d8eb84b-e3c3-46ab-b9bb-29f63d57284e-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vjhs9\" (UID: \"9d8eb84b-e3c3-46ab-b9bb-29f63d57284e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vjhs9" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.051920 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7d5t\" (UniqueName: \"kubernetes.io/projected/063f4cfd-666f-4493-8678-df74e0347ba3-kube-api-access-n7d5t\") pod \"cluster-image-registry-operator-dc59b4c8b-r9hww\" (UID: \"063f4cfd-666f-4493-8678-df74e0347ba3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-r9hww" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.051955 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.051985 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wgfv\" (UniqueName: \"kubernetes.io/projected/0eb0e632-fc50-4845-aa1b-4aab2bb7826b-kube-api-access-7wgfv\") pod \"downloads-7954f5f757-qtxcm\" (UID: \"0eb0e632-fc50-4845-aa1b-4aab2bb7826b\") " pod="openshift-console/downloads-7954f5f757-qtxcm" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052005 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/fcde4e44-9ff6-4539-84f3-a016080e13ce-encryption-config\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052055 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/8f8ad951-1f10-4883-8132-7afa6c3df767-etcd-client\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052075 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bsz56\" (UniqueName: \"kubernetes.io/projected/c59baa8a-ba27-4ef6-9d63-a0a25b597f7e-kube-api-access-bsz56\") pod \"image-pruner-29495520-c9vgk\" (UID: \"c59baa8a-ba27-4ef6-9d63-a0a25b597f7e\") " pod="openshift-image-registry/image-pruner-29495520-c9vgk" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052096 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fcde4e44-9ff6-4539-84f3-a016080e13ce-serving-cert\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052117 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/8f8ad951-1f10-4883-8132-7afa6c3df767-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052186 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwfns\" (UniqueName: \"kubernetes.io/projected/bb88aa5c-bc88-4447-b233-ad65df878fcd-kube-api-access-vwfns\") pod \"authentication-operator-69f744f599-7hvjx\" (UID: \"bb88aa5c-bc88-4447-b233-ad65df878fcd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7hvjx" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052207 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/93771e95-54e2-4f25-86b1-c1f6b0f18a8f-auth-proxy-config\") pod \"machine-approver-56656f9798-cd22w\" (UID: \"93771e95-54e2-4f25-86b1-c1f6b0f18a8f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cd22w" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052226 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b60a4e04-0955-4b9a-9165-92ee6a82b1a6-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8knvk\" (UID: \"b60a4e04-0955-4b9a-9165-92ee6a82b1a6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8knvk" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052249 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/93771e95-54e2-4f25-86b1-c1f6b0f18a8f-machine-approver-tls\") pod \"machine-approver-56656f9798-cd22w\" (UID: \"93771e95-54e2-4f25-86b1-c1f6b0f18a8f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cd22w" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052272 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: 
\"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052295 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12453952-578c-4ef3-97bd-eee389ec3d91-config\") pod \"kube-controller-manager-operator-78b949d7b-9wwjr\" (UID: \"12453952-578c-4ef3-97bd-eee389ec3d91\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9wwjr" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052320 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jsdbc\" (UniqueName: \"kubernetes.io/projected/5af6d643-70f6-435e-b323-fac9aa37b466-kube-api-access-jsdbc\") pod \"openshift-config-operator-7777fb866f-8skch\" (UID: \"5af6d643-70f6-435e-b323-fac9aa37b466\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8skch" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052340 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fcde4e44-9ff6-4539-84f3-a016080e13ce-trusted-ca-bundle\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052358 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052378 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/063f4cfd-666f-4493-8678-df74e0347ba3-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-r9hww\" (UID: \"063f4cfd-666f-4493-8678-df74e0347ba3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-r9hww" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052424 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052443 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/28808760-39b2-4b17-82b4-d3b6783ed31c-images\") pod \"machine-config-operator-74547568cd-m229k\" (UID: \"28808760-39b2-4b17-82b4-d3b6783ed31c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m229k" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052465 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: 
\"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052485 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/51bce3b6-6a4d-45ea-89a7-bf5cf50d7610-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-d4zrj\" (UID: \"51bce3b6-6a4d-45ea-89a7-bf5cf50d7610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d4zrj" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052508 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/fcde4e44-9ff6-4539-84f3-a016080e13ce-etcd-serving-ca\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052527 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8f8ad951-1f10-4883-8132-7afa6c3df767-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052546 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27k44\" (UniqueName: \"kubernetes.io/projected/4a58c498-e399-40e4-a271-d42efd6c6745-kube-api-access-27k44\") pod \"openshift-controller-manager-operator-756b6f6bc6-b5gjz\" (UID: \"4a58c498-e399-40e4-a271-d42efd6c6745\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b5gjz" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052564 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/929bbe65-1902-453c-bebf-4e833b325ab1-config\") pod \"route-controller-manager-6576b87f9c-mhgj8\" (UID: \"929bbe65-1902-453c-bebf-4e833b325ab1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052587 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a58c498-e399-40e4-a271-d42efd6c6745-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-b5gjz\" (UID: \"4a58c498-e399-40e4-a271-d42efd6c6745\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b5gjz" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052604 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/929bbe65-1902-453c-bebf-4e833b325ab1-serving-cert\") pod \"route-controller-manager-6576b87f9c-mhgj8\" (UID: \"929bbe65-1902-453c-bebf-4e833b325ab1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052622 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052640 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-2f8ww\" (UID: \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052661 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2w84\" (UniqueName: \"kubernetes.io/projected/28808760-39b2-4b17-82b4-d3b6783ed31c-kube-api-access-x2w84\") pod \"machine-config-operator-74547568cd-m229k\" (UID: \"28808760-39b2-4b17-82b4-d3b6783ed31c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m229k" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052667 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/929bbe65-1902-453c-bebf-4e833b325ab1-client-ca\") pod \"route-controller-manager-6576b87f9c-mhgj8\" (UID: \"929bbe65-1902-453c-bebf-4e833b325ab1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052686 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbfdw\" (UniqueName: \"kubernetes.io/projected/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-kube-api-access-hbfdw\") pod \"controller-manager-879f6c89f-2f8ww\" (UID: \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052728 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/fcde4e44-9ff6-4539-84f3-a016080e13ce-image-import-ca\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052761 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8f8ad951-1f10-4883-8132-7afa6c3df767-audit-policies\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052820 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/12453952-578c-4ef3-97bd-eee389ec3d91-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-9wwjr\" (UID: \"12453952-578c-4ef3-97bd-eee389ec3d91\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9wwjr" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052860 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mdf5\" (UniqueName: \"kubernetes.io/projected/51bce3b6-6a4d-45ea-89a7-bf5cf50d7610-kube-api-access-7mdf5\") pod \"machine-api-operator-5694c8668f-d4zrj\" (UID: \"51bce3b6-6a4d-45ea-89a7-bf5cf50d7610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d4zrj" Jan 30 00:10:53 crc 
kubenswrapper[4885]: I0130 00:10:53.052889 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fcde4e44-9ff6-4539-84f3-a016080e13ce-etcd-client\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052917 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bb88aa5c-bc88-4447-b233-ad65df878fcd-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-7hvjx\" (UID: \"bb88aa5c-bc88-4447-b233-ad65df878fcd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7hvjx" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052937 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/8f8ad951-1f10-4883-8132-7afa6c3df767-encryption-config\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052960 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b60a4e04-0955-4b9a-9165-92ee6a82b1a6-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8knvk\" (UID: \"b60a4e04-0955-4b9a-9165-92ee6a82b1a6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8knvk" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.052998 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/28808760-39b2-4b17-82b4-d3b6783ed31c-proxy-tls\") pod \"machine-config-operator-74547568cd-m229k\" (UID: \"28808760-39b2-4b17-82b4-d3b6783ed31c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m229k" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.053978 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.054087 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5af6d643-70f6-435e-b323-fac9aa37b466-serving-cert\") pod \"openshift-config-operator-7777fb866f-8skch\" (UID: \"5af6d643-70f6-435e-b323-fac9aa37b466\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8skch" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.055238 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb88aa5c-bc88-4447-b233-ad65df878fcd-serving-cert\") pod \"authentication-operator-69f744f599-7hvjx\" (UID: \"bb88aa5c-bc88-4447-b233-ad65df878fcd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7hvjx" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.055544 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.055658 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.056086 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8f8ad951-1f10-4883-8132-7afa6c3df767-audit-dir\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.048652 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-audit-policies\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.056406 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-config\") pod \"controller-manager-879f6c89f-2f8ww\" (UID: \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.056635 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.056856 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-serving-cert\") pod \"controller-manager-879f6c89f-2f8ww\" (UID: \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.057243 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb88aa5c-bc88-4447-b233-ad65df878fcd-config\") pod \"authentication-operator-69f744f599-7hvjx\" (UID: \"bb88aa5c-bc88-4447-b233-ad65df878fcd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7hvjx" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.057301 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8f8ad951-1f10-4883-8132-7afa6c3df767-audit-policies\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.058306 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: 
\"kubernetes.io/configmap/fcde4e44-9ff6-4539-84f3-a016080e13ce-image-import-ca\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.058351 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.058685 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93771e95-54e2-4f25-86b1-c1f6b0f18a8f-config\") pod \"machine-approver-56656f9798-cd22w\" (UID: \"93771e95-54e2-4f25-86b1-c1f6b0f18a8f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cd22w" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.058720 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8f8ad951-1f10-4883-8132-7afa6c3df767-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.059019 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/8f8ad951-1f10-4883-8132-7afa6c3df767-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.059137 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/fcde4e44-9ff6-4539-84f3-a016080e13ce-etcd-serving-ca\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.059238 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.059696 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-client-ca\") pod \"controller-manager-879f6c89f-2f8ww\" (UID: \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.060007 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/93771e95-54e2-4f25-86b1-c1f6b0f18a8f-auth-proxy-config\") pod \"machine-approver-56656f9798-cd22w\" (UID: \"93771e95-54e2-4f25-86b1-c1f6b0f18a8f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cd22w" Jan 30 00:10:53 crc 
kubenswrapper[4885]: I0130 00:10:53.061457 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fcde4e44-9ff6-4539-84f3-a016080e13ce-etcd-client\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.061549 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/929bbe65-1902-453c-bebf-4e833b325ab1-config\") pod \"route-controller-manager-6576b87f9c-mhgj8\" (UID: \"929bbe65-1902-453c-bebf-4e833b325ab1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.062889 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51bce3b6-6a4d-45ea-89a7-bf5cf50d7610-config\") pod \"machine-api-operator-5694c8668f-d4zrj\" (UID: \"51bce3b6-6a4d-45ea-89a7-bf5cf50d7610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d4zrj" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.062988 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a58c498-e399-40e4-a271-d42efd6c6745-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-b5gjz\" (UID: \"4a58c498-e399-40e4-a271-d42efd6c6745\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b5gjz" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.063039 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8f8ad951-1f10-4883-8132-7afa6c3df767-etcd-client\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.063394 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/51bce3b6-6a4d-45ea-89a7-bf5cf50d7610-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-d4zrj\" (UID: \"51bce3b6-6a4d-45ea-89a7-bf5cf50d7610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d4zrj" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.063802 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/19609ed6-3922-4b41-b02d-abf4fd2922a6-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-24xxs\" (UID: \"19609ed6-3922-4b41-b02d-abf4fd2922a6\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24xxs" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.064689 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/93771e95-54e2-4f25-86b1-c1f6b0f18a8f-machine-approver-tls\") pod \"machine-approver-56656f9798-cd22w\" (UID: \"93771e95-54e2-4f25-86b1-c1f6b0f18a8f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cd22w" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.064949 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.065526 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bb88aa5c-bc88-4447-b233-ad65df878fcd-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-7hvjx\" (UID: \"bb88aa5c-bc88-4447-b233-ad65df878fcd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7hvjx" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.066377 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fcde4e44-9ff6-4539-84f3-a016080e13ce-trusted-ca-bundle\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.066498 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.066861 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/fcde4e44-9ff6-4539-84f3-a016080e13ce-encryption-config\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.066877 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-2f8ww\" (UID: \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.067121 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.067731 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.067749 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fcde4e44-9ff6-4539-84f3-a016080e13ce-serving-cert\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " 
pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.068867 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/929bbe65-1902-453c-bebf-4e833b325ab1-serving-cert\") pod \"route-controller-manager-6576b87f9c-mhgj8\" (UID: \"929bbe65-1902-453c-bebf-4e833b325ab1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.069827 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/8f8ad951-1f10-4883-8132-7afa6c3df767-encryption-config\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.072651 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f8ad951-1f10-4883-8132-7afa6c3df767-serving-cert\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.073363 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.074361 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.094187 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.114396 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.134331 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.153873 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.169797 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d806b98c-1d4e-42b2-9da3-4afc4ca6e255-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-hcx7w\" (UID: \"d806b98c-1d4e-42b2-9da3-4afc4ca6e255\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hcx7w" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.169839 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/28808760-39b2-4b17-82b4-d3b6783ed31c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-m229k\" (UID: \"28808760-39b2-4b17-82b4-d3b6783ed31c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m229k" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.169873 4885 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b60a4e04-0955-4b9a-9165-92ee6a82b1a6-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8knvk\" (UID: \"b60a4e04-0955-4b9a-9165-92ee6a82b1a6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8knvk" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.169899 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/a6dfcb67-43fe-46d9-9349-c581afa2d82f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-6fm5q\" (UID: \"a6dfcb67-43fe-46d9-9349-c581afa2d82f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6fm5q" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.169938 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/063f4cfd-666f-4493-8678-df74e0347ba3-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-r9hww\" (UID: \"063f4cfd-666f-4493-8678-df74e0347ba3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-r9hww" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.169961 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7d5t\" (UniqueName: \"kubernetes.io/projected/063f4cfd-666f-4493-8678-df74e0347ba3-kube-api-access-n7d5t\") pod \"cluster-image-registry-operator-dc59b4c8b-r9hww\" (UID: \"063f4cfd-666f-4493-8678-df74e0347ba3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-r9hww" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.169987 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9d8eb84b-e3c3-46ab-b9bb-29f63d57284e-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vjhs9\" (UID: \"9d8eb84b-e3c3-46ab-b9bb-29f63d57284e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vjhs9" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.170035 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b60a4e04-0955-4b9a-9165-92ee6a82b1a6-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8knvk\" (UID: \"b60a4e04-0955-4b9a-9165-92ee6a82b1a6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8knvk" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.170071 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12453952-578c-4ef3-97bd-eee389ec3d91-config\") pod \"kube-controller-manager-operator-78b949d7b-9wwjr\" (UID: \"12453952-578c-4ef3-97bd-eee389ec3d91\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9wwjr" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.170290 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/063f4cfd-666f-4493-8678-df74e0347ba3-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-r9hww\" (UID: \"063f4cfd-666f-4493-8678-df74e0347ba3\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-r9hww" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.170358 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/28808760-39b2-4b17-82b4-d3b6783ed31c-images\") pod \"machine-config-operator-74547568cd-m229k\" (UID: \"28808760-39b2-4b17-82b4-d3b6783ed31c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m229k" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.170399 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2w84\" (UniqueName: \"kubernetes.io/projected/28808760-39b2-4b17-82b4-d3b6783ed31c-kube-api-access-x2w84\") pod \"machine-config-operator-74547568cd-m229k\" (UID: \"28808760-39b2-4b17-82b4-d3b6783ed31c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m229k" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.170435 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/12453952-578c-4ef3-97bd-eee389ec3d91-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-9wwjr\" (UID: \"12453952-578c-4ef3-97bd-eee389ec3d91\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9wwjr" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.170473 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b60a4e04-0955-4b9a-9165-92ee6a82b1a6-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8knvk\" (UID: \"b60a4e04-0955-4b9a-9165-92ee6a82b1a6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8knvk" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.170498 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/28808760-39b2-4b17-82b4-d3b6783ed31c-proxy-tls\") pod \"machine-config-operator-74547568cd-m229k\" (UID: \"28808760-39b2-4b17-82b4-d3b6783ed31c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m229k" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.170525 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/063f4cfd-666f-4493-8678-df74e0347ba3-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-r9hww\" (UID: \"063f4cfd-666f-4493-8678-df74e0347ba3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-r9hww" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.170573 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-db4tl\" (UniqueName: \"kubernetes.io/projected/a6dfcb67-43fe-46d9-9349-c581afa2d82f-kube-api-access-db4tl\") pod \"control-plane-machine-set-operator-78cbb6b69f-6fm5q\" (UID: \"a6dfcb67-43fe-46d9-9349-c581afa2d82f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6fm5q" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.170646 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/12453952-578c-4ef3-97bd-eee389ec3d91-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-9wwjr\" (UID: 
\"12453952-578c-4ef3-97bd-eee389ec3d91\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9wwjr" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.170691 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v86cx\" (UniqueName: \"kubernetes.io/projected/d806b98c-1d4e-42b2-9da3-4afc4ca6e255-kube-api-access-v86cx\") pod \"multus-admission-controller-857f4d67dd-hcx7w\" (UID: \"d806b98c-1d4e-42b2-9da3-4afc4ca6e255\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hcx7w" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.170720 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzvnk\" (UniqueName: \"kubernetes.io/projected/9d8eb84b-e3c3-46ab-b9bb-29f63d57284e-kube-api-access-lzvnk\") pod \"package-server-manager-789f6589d5-vjhs9\" (UID: \"9d8eb84b-e3c3-46ab-b9bb-29f63d57284e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vjhs9" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.171121 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/28808760-39b2-4b17-82b4-d3b6783ed31c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-m229k\" (UID: \"28808760-39b2-4b17-82b4-d3b6783ed31c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m229k" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.171633 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/063f4cfd-666f-4493-8678-df74e0347ba3-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-r9hww\" (UID: \"063f4cfd-666f-4493-8678-df74e0347ba3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-r9hww" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.173626 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.174411 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/063f4cfd-666f-4493-8678-df74e0347ba3-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-r9hww\" (UID: \"063f4cfd-666f-4493-8678-df74e0347ba3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-r9hww" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.193463 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.213247 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.233323 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.253911 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.273724 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.299997 4885 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ingress-operator"/"trusted-ca" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.314392 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.323171 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/a6dfcb67-43fe-46d9-9349-c581afa2d82f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-6fm5q\" (UID: \"a6dfcb67-43fe-46d9-9349-c581afa2d82f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6fm5q" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.333836 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.353815 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.374040 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.394281 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.413097 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.432737 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.453046 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.472803 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.492593 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.521896 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.533570 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.554999 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.574017 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.605543 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.613228 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 
00:10:53.654312 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.673462 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.693983 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.713657 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.726301 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/12453952-578c-4ef3-97bd-eee389ec3d91-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-9wwjr\" (UID: \"12453952-578c-4ef3-97bd-eee389ec3d91\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9wwjr" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.733315 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.741322 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12453952-578c-4ef3-97bd-eee389ec3d91-config\") pod \"kube-controller-manager-operator-78b949d7b-9wwjr\" (UID: \"12453952-578c-4ef3-97bd-eee389ec3d91\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9wwjr" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.754417 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.765308 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9d8eb84b-e3c3-46ab-b9bb-29f63d57284e-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vjhs9\" (UID: \"9d8eb84b-e3c3-46ab-b9bb-29f63d57284e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vjhs9" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.774698 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.794216 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.814232 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.834039 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.855875 4885 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.874166 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.892826 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.913046 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.933304 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.953580 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.961654 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b60a4e04-0955-4b9a-9165-92ee6a82b1a6-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8knvk\" (UID: \"b60a4e04-0955-4b9a-9165-92ee6a82b1a6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8knvk" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.972214 4885 request.go:700] Waited for 1.020173718s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-service-ca-operator/configmaps?fieldSelector=metadata.name%3Dservice-ca-operator-config&limit=500&resourceVersion=0 Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.973637 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 30 00:10:53 crc kubenswrapper[4885]: I0130 00:10:53.993485 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.013595 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.023875 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b60a4e04-0955-4b9a-9165-92ee6a82b1a6-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8knvk\" (UID: \"b60a4e04-0955-4b9a-9165-92ee6a82b1a6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8knvk" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.033822 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.054260 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.075472 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 30 00:10:54 crc 
kubenswrapper[4885]: I0130 00:10:54.095411 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.113583 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.133830 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.154026 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 30 00:10:54 crc kubenswrapper[4885]: E0130 00:10:54.171034 4885 secret.go:188] Couldn't get secret openshift-multus/multus-admission-controller-secret: failed to sync secret cache: timed out waiting for the condition Jan 30 00:10:54 crc kubenswrapper[4885]: E0130 00:10:54.171083 4885 secret.go:188] Couldn't get secret openshift-machine-config-operator/mco-proxy-tls: failed to sync secret cache: timed out waiting for the condition Jan 30 00:10:54 crc kubenswrapper[4885]: E0130 00:10:54.171130 4885 configmap.go:193] Couldn't get configMap openshift-machine-config-operator/machine-config-operator-images: failed to sync configmap cache: timed out waiting for the condition Jan 30 00:10:54 crc kubenswrapper[4885]: E0130 00:10:54.171145 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d806b98c-1d4e-42b2-9da3-4afc4ca6e255-webhook-certs podName:d806b98c-1d4e-42b2-9da3-4afc4ca6e255 nodeName:}" failed. No retries permitted until 2026-01-30 00:10:54.671118768 +0000 UTC m=+141.262590516 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/d806b98c-1d4e-42b2-9da3-4afc4ca6e255-webhook-certs") pod "multus-admission-controller-857f4d67dd-hcx7w" (UID: "d806b98c-1d4e-42b2-9da3-4afc4ca6e255") : failed to sync secret cache: timed out waiting for the condition Jan 30 00:10:54 crc kubenswrapper[4885]: E0130 00:10:54.171164 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28808760-39b2-4b17-82b4-d3b6783ed31c-proxy-tls podName:28808760-39b2-4b17-82b4-d3b6783ed31c nodeName:}" failed. No retries permitted until 2026-01-30 00:10:54.671158139 +0000 UTC m=+141.262629887 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "proxy-tls" (UniqueName: "kubernetes.io/secret/28808760-39b2-4b17-82b4-d3b6783ed31c-proxy-tls") pod "machine-config-operator-74547568cd-m229k" (UID: "28808760-39b2-4b17-82b4-d3b6783ed31c") : failed to sync secret cache: timed out waiting for the condition Jan 30 00:10:54 crc kubenswrapper[4885]: E0130 00:10:54.171176 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/28808760-39b2-4b17-82b4-d3b6783ed31c-images podName:28808760-39b2-4b17-82b4-d3b6783ed31c nodeName:}" failed. No retries permitted until 2026-01-30 00:10:54.67117144 +0000 UTC m=+141.262643188 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "images" (UniqueName: "kubernetes.io/configmap/28808760-39b2-4b17-82b4-d3b6783ed31c-images") pod "machine-config-operator-74547568cd-m229k" (UID: "28808760-39b2-4b17-82b4-d3b6783ed31c") : failed to sync configmap cache: timed out waiting for the condition Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.173644 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.194012 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.213802 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.232963 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.253978 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.274195 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.293087 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.315001 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.334460 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.354084 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.373339 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.394162 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.413528 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.433804 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.454228 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.474326 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.495108 4885 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.515561 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.535035 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.554446 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.574885 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.593720 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.614123 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.632970 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.673638 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.695014 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.695200 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/28808760-39b2-4b17-82b4-d3b6783ed31c-proxy-tls\") pod \"machine-config-operator-74547568cd-m229k\" (UID: \"28808760-39b2-4b17-82b4-d3b6783ed31c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m229k" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.695498 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d806b98c-1d4e-42b2-9da3-4afc4ca6e255-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-hcx7w\" (UID: \"d806b98c-1d4e-42b2-9da3-4afc4ca6e255\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hcx7w" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.695724 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/28808760-39b2-4b17-82b4-d3b6783ed31c-images\") pod \"machine-config-operator-74547568cd-m229k\" (UID: \"28808760-39b2-4b17-82b4-d3b6783ed31c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m229k" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.697080 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/28808760-39b2-4b17-82b4-d3b6783ed31c-images\") pod \"machine-config-operator-74547568cd-m229k\" (UID: \"28808760-39b2-4b17-82b4-d3b6783ed31c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m229k" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.706964 4885 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/28808760-39b2-4b17-82b4-d3b6783ed31c-proxy-tls\") pod \"machine-config-operator-74547568cd-m229k\" (UID: \"28808760-39b2-4b17-82b4-d3b6783ed31c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m229k" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.710042 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/d806b98c-1d4e-42b2-9da3-4afc4ca6e255-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-hcx7w\" (UID: \"d806b98c-1d4e-42b2-9da3-4afc4ca6e255\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hcx7w" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.713878 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.736379 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.753176 4885 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.774580 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.811665 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9svwh\" (UniqueName: \"kubernetes.io/projected/93771e95-54e2-4f25-86b1-c1f6b0f18a8f-kube-api-access-9svwh\") pod \"machine-approver-56656f9798-cd22w\" (UID: \"93771e95-54e2-4f25-86b1-c1f6b0f18a8f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cd22w" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.829333 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2t74\" (UniqueName: \"kubernetes.io/projected/19609ed6-3922-4b41-b02d-abf4fd2922a6-kube-api-access-r2t74\") pod \"cluster-samples-operator-665b6dd947-24xxs\" (UID: \"19609ed6-3922-4b41-b02d-abf4fd2922a6\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24xxs" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.853568 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7mw2\" (UniqueName: \"kubernetes.io/projected/929bbe65-1902-453c-bebf-4e833b325ab1-kube-api-access-j7mw2\") pod \"route-controller-manager-6576b87f9c-mhgj8\" (UID: \"929bbe65-1902-453c-bebf-4e833b325ab1\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.856404 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24xxs" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.872404 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tclz\" (UniqueName: \"kubernetes.io/projected/fcde4e44-9ff6-4539-84f3-a016080e13ce-kube-api-access-8tclz\") pod \"apiserver-76f77b778f-xw5nc\" (UID: \"fcde4e44-9ff6-4539-84f3-a016080e13ce\") " pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.873083 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.893890 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.914111 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.927496 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.933686 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.980895 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6l7xv\" (UniqueName: \"kubernetes.io/projected/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-kube-api-access-6l7xv\") pod \"oauth-openshift-558db77b4-p87kw\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.992079 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbfdw\" (UniqueName: \"kubernetes.io/projected/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-kube-api-access-hbfdw\") pod \"controller-manager-879f6c89f-2f8ww\" (UID: \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.992171 4885 request.go:700] Waited for 1.935061747s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-oauth-apiserver/serviceaccounts/oauth-apiserver-sa/token Jan 30 00:10:54 crc kubenswrapper[4885]: I0130 00:10:54.997641 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.009915 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsj8w\" (UniqueName: \"kubernetes.io/projected/8f8ad951-1f10-4883-8132-7afa6c3df767-kube-api-access-wsj8w\") pod \"apiserver-7bbb656c7d-wn7lm\" (UID: \"8f8ad951-1f10-4883-8132-7afa6c3df767\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.030512 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.032656 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwfns\" (UniqueName: \"kubernetes.io/projected/bb88aa5c-bc88-4447-b233-ad65df878fcd-kube-api-access-vwfns\") pod \"authentication-operator-69f744f599-7hvjx\" (UID: \"bb88aa5c-bc88-4447-b233-ad65df878fcd\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7hvjx" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.056212 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27k44\" (UniqueName: \"kubernetes.io/projected/4a58c498-e399-40e4-a271-d42efd6c6745-kube-api-access-27k44\") pod \"openshift-controller-manager-operator-756b6f6bc6-b5gjz\" (UID: \"4a58c498-e399-40e4-a271-d42efd6c6745\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b5gjz" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.086462 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mdf5\" (UniqueName: \"kubernetes.io/projected/51bce3b6-6a4d-45ea-89a7-bf5cf50d7610-kube-api-access-7mdf5\") pod \"machine-api-operator-5694c8668f-d4zrj\" (UID: \"51bce3b6-6a4d-45ea-89a7-bf5cf50d7610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d4zrj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.088109 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-7hvjx" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.091544 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bsz56\" (UniqueName: \"kubernetes.io/projected/c59baa8a-ba27-4ef6-9d63-a0a25b597f7e-kube-api-access-bsz56\") pod \"image-pruner-29495520-c9vgk\" (UID: \"c59baa8a-ba27-4ef6-9d63-a0a25b597f7e\") " pod="openshift-image-registry/image-pruner-29495520-c9vgk" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.095573 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29495520-c9vgk" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.105177 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cd22w" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.110443 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jsdbc\" (UniqueName: \"kubernetes.io/projected/5af6d643-70f6-435e-b323-fac9aa37b466-kube-api-access-jsdbc\") pod \"openshift-config-operator-7777fb866f-8skch\" (UID: \"5af6d643-70f6-435e-b323-fac9aa37b466\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8skch" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.112865 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24xxs"] Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.120902 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8skch" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.130186 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-d4zrj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.131508 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7wgfv\" (UniqueName: \"kubernetes.io/projected/0eb0e632-fc50-4845-aa1b-4aab2bb7826b-kube-api-access-7wgfv\") pod \"downloads-7954f5f757-qtxcm\" (UID: \"0eb0e632-fc50-4845-aa1b-4aab2bb7826b\") " pod="openshift-console/downloads-7954f5f757-qtxcm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.139978 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:55 crc kubenswrapper[4885]: W0130 00:10:55.141374 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod93771e95_54e2_4f25_86b1_c1f6b0f18a8f.slice/crio-307b7635b0aca3b2a7adb9466395fb67d193d415abc79c7139aee18b2450ef1a WatchSource:0}: Error finding container 307b7635b0aca3b2a7adb9466395fb67d193d415abc79c7139aee18b2450ef1a: Status 404 returned error can't find the container with id 307b7635b0aca3b2a7adb9466395fb67d193d415abc79c7139aee18b2450ef1a Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.146123 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-qtxcm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.159639 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7d5t\" (UniqueName: \"kubernetes.io/projected/063f4cfd-666f-4493-8678-df74e0347ba3-kube-api-access-n7d5t\") pod \"cluster-image-registry-operator-dc59b4c8b-r9hww\" (UID: \"063f4cfd-666f-4493-8678-df74e0347ba3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-r9hww" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.178547 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/063f4cfd-666f-4493-8678-df74e0347ba3-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-r9hww\" (UID: \"063f4cfd-666f-4493-8678-df74e0347ba3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-r9hww" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.195634 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b5gjz" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.204592 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-xw5nc"] Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.207321 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b60a4e04-0955-4b9a-9165-92ee6a82b1a6-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-8knvk\" (UID: \"b60a4e04-0955-4b9a-9165-92ee6a82b1a6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8knvk" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.215803 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2w84\" (UniqueName: \"kubernetes.io/projected/28808760-39b2-4b17-82b4-d3b6783ed31c-kube-api-access-x2w84\") pod \"machine-config-operator-74547568cd-m229k\" (UID: \"28808760-39b2-4b17-82b4-d3b6783ed31c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m229k" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.218374 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-r9hww" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.232297 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-db4tl\" (UniqueName: \"kubernetes.io/projected/a6dfcb67-43fe-46d9-9349-c581afa2d82f-kube-api-access-db4tl\") pod \"control-plane-machine-set-operator-78cbb6b69f-6fm5q\" (UID: \"a6dfcb67-43fe-46d9-9349-c581afa2d82f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6fm5q" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.253711 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6fm5q" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.254725 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/12453952-578c-4ef3-97bd-eee389ec3d91-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-9wwjr\" (UID: \"12453952-578c-4ef3-97bd-eee389ec3d91\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9wwjr" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.278363 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v86cx\" (UniqueName: \"kubernetes.io/projected/d806b98c-1d4e-42b2-9da3-4afc4ca6e255-kube-api-access-v86cx\") pod \"multus-admission-controller-857f4d67dd-hcx7w\" (UID: \"d806b98c-1d4e-42b2-9da3-4afc4ca6e255\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hcx7w" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.283730 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.295932 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9wwjr" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.301959 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzvnk\" (UniqueName: \"kubernetes.io/projected/9d8eb84b-e3c3-46ab-b9bb-29f63d57284e-kube-api-access-lzvnk\") pod \"package-server-manager-789f6589d5-vjhs9\" (UID: \"9d8eb84b-e3c3-46ab-b9bb-29f63d57284e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vjhs9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.329291 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8knvk" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.379514 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-hcx7w" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.388745 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m229k" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.399611 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-pruner-29495520-c9vgk"] Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410052 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4b65245c-0e70-4a17-b739-9c08059b07dc-proxy-tls\") pod \"machine-config-controller-84d6567774-kbjfj\" (UID: \"4b65245c-0e70-4a17-b739-9c08059b07dc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kbjfj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410096 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvcxh\" (UniqueName: \"kubernetes.io/projected/c58668af-88e5-4058-9571-5ce0f3fd7e9f-kube-api-access-jvcxh\") pod \"console-f9d7485db-2bgb9\" (UID: \"c58668af-88e5-4058-9571-5ce0f3fd7e9f\") " pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410139 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c58668af-88e5-4058-9571-5ce0f3fd7e9f-oauth-serving-cert\") pod \"console-f9d7485db-2bgb9\" (UID: \"c58668af-88e5-4058-9571-5ce0f3fd7e9f\") " pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410156 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f40ec490-af02-4935-bb20-3698b71fce88-srv-cert\") pod \"olm-operator-6b444d44fb-8r5gv\" (UID: \"f40ec490-af02-4935-bb20-3698b71fce88\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8r5gv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410172 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f40ec490-af02-4935-bb20-3698b71fce88-profile-collector-cert\") pod \"olm-operator-6b444d44fb-8r5gv\" (UID: \"f40ec490-af02-4935-bb20-3698b71fce88\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8r5gv" 
Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410192 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxjkf\" (UniqueName: \"kubernetes.io/projected/a345d711-84e3-47c8-a255-f833dfaca7fa-kube-api-access-wxjkf\") pod \"marketplace-operator-79b997595-mfxlj\" (UID: \"a345d711-84e3-47c8-a255-f833dfaca7fa\") " pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410217 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/7ace7d0c-1b65-484b-9724-a03aded5ec7f-etcd-ca\") pod \"etcd-operator-b45778765-4bhjw\" (UID: \"7ace7d0c-1b65-484b-9724-a03aded5ec7f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410236 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ea6cf88e-bedf-4010-b737-0c93f2c4d4be-bound-sa-token\") pod \"ingress-operator-5b745b69d9-4xtnb\" (UID: \"ea6cf88e-bedf-4010-b737-0c93f2c4d4be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4xtnb" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410251 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-722r9\" (UniqueName: \"kubernetes.io/projected/9c216d7c-42b5-45fb-a68d-8e38d622978f-kube-api-access-722r9\") pod \"dns-default-f5g22\" (UID: \"9c216d7c-42b5-45fb-a68d-8e38d622978f\") " pod="openshift-dns/dns-default-f5g22" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410270 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0203f99c-4bc4-4ebd-b17b-7f1f54f54315-profile-collector-cert\") pod \"catalog-operator-68c6474976-clmbp\" (UID: \"0203f99c-4bc4-4ebd-b17b-7f1f54f54315\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-clmbp" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410296 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/a7093ca4-c09c-4031-ba6f-e7fc85890480-stats-auth\") pod \"router-default-5444994796-fvdhv\" (UID: \"a7093ca4-c09c-4031-ba6f-e7fc85890480\") " pod="openshift-ingress/router-default-5444994796-fvdhv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410322 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a345d711-84e3-47c8-a255-f833dfaca7fa-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mfxlj\" (UID: \"a345d711-84e3-47c8-a255-f833dfaca7fa\") " pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410350 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bv77v\" (UniqueName: \"kubernetes.io/projected/1e053140-7618-4580-8899-a121dd6759f8-kube-api-access-bv77v\") pod \"openshift-apiserver-operator-796bbdcf4f-nm4dq\" (UID: \"1e053140-7618-4580-8899-a121dd6759f8\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-nm4dq" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 
00:10:55.410377 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d3f0f887-4427-41fa-a495-470f6a1da8ae-installation-pull-secrets\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410393 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4b65245c-0e70-4a17-b739-9c08059b07dc-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-kbjfj\" (UID: \"4b65245c-0e70-4a17-b739-9c08059b07dc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kbjfj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410421 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwnvf\" (UniqueName: \"kubernetes.io/projected/7ace7d0c-1b65-484b-9724-a03aded5ec7f-kube-api-access-bwnvf\") pod \"etcd-operator-b45778765-4bhjw\" (UID: \"7ace7d0c-1b65-484b-9724-a03aded5ec7f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410449 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a345d711-84e3-47c8-a255-f833dfaca7fa-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mfxlj\" (UID: \"a345d711-84e3-47c8-a255-f833dfaca7fa\") " pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410478 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stlkn\" (UniqueName: \"kubernetes.io/projected/a7093ca4-c09c-4031-ba6f-e7fc85890480-kube-api-access-stlkn\") pod \"router-default-5444994796-fvdhv\" (UID: \"a7093ca4-c09c-4031-ba6f-e7fc85890480\") " pod="openshift-ingress/router-default-5444994796-fvdhv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410520 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/361af061-ac94-45f6-af48-9e6f0a5a89e1-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-qw447\" (UID: \"361af061-ac94-45f6-af48-9e6f0a5a89e1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qw447" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410552 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7ace7d0c-1b65-484b-9724-a03aded5ec7f-serving-cert\") pod \"etcd-operator-b45778765-4bhjw\" (UID: \"7ace7d0c-1b65-484b-9724-a03aded5ec7f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410596 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9c216d7c-42b5-45fb-a68d-8e38d622978f-metrics-tls\") pod \"dns-default-f5g22\" (UID: \"9c216d7c-42b5-45fb-a68d-8e38d622978f\") " pod="openshift-dns/dns-default-f5g22" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410622 
4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scjzj\" (UniqueName: \"kubernetes.io/projected/5f617943-c63f-4006-907c-dc2584eac526-kube-api-access-scjzj\") pod \"service-ca-operator-777779d784-67ddr\" (UID: \"5f617943-c63f-4006-907c-dc2584eac526\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-67ddr" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410663 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72tlm\" (UniqueName: \"kubernetes.io/projected/b8533351-ed92-4278-89ed-8e3f31aecb20-kube-api-access-72tlm\") pod \"dns-operator-744455d44c-8vr8f\" (UID: \"b8533351-ed92-4278-89ed-8e3f31aecb20\") " pod="openshift-dns-operator/dns-operator-744455d44c-8vr8f" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410690 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/14128854-7eae-4729-90c4-10370fde7337-tmpfs\") pod \"packageserver-d55dfcdfc-fmh4j\" (UID: \"14128854-7eae-4729-90c4-10370fde7337\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410728 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5f617943-c63f-4006-907c-dc2584eac526-serving-cert\") pod \"service-ca-operator-777779d784-67ddr\" (UID: \"5f617943-c63f-4006-907c-dc2584eac526\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-67ddr" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410745 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxgrc\" (UniqueName: \"kubernetes.io/projected/361af061-ac94-45f6-af48-9e6f0a5a89e1-kube-api-access-kxgrc\") pod \"kube-storage-version-migrator-operator-b67b599dd-qw447\" (UID: \"361af061-ac94-45f6-af48-9e6f0a5a89e1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qw447" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410763 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ace7d0c-1b65-484b-9724-a03aded5ec7f-config\") pod \"etcd-operator-b45778765-4bhjw\" (UID: \"7ace7d0c-1b65-484b-9724-a03aded5ec7f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410827 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhk4q\" (UniqueName: \"kubernetes.io/projected/f40ec490-af02-4935-bb20-3698b71fce88-kube-api-access-fhk4q\") pod \"olm-operator-6b444d44fb-8r5gv\" (UID: \"f40ec490-af02-4935-bb20-3698b71fce88\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8r5gv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410846 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/022b7871-8bf2-4432-9f33-d816fdd16fce-signing-key\") pod \"service-ca-9c57cc56f-fkgqw\" (UID: \"022b7871-8bf2-4432-9f33-d816fdd16fce\") " pod="openshift-service-ca/service-ca-9c57cc56f-fkgqw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410863 4885 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c58668af-88e5-4058-9571-5ce0f3fd7e9f-trusted-ca-bundle\") pod \"console-f9d7485db-2bgb9\" (UID: \"c58668af-88e5-4058-9571-5ce0f3fd7e9f\") " pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410895 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410935 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d3f0f887-4427-41fa-a495-470f6a1da8ae-trusted-ca\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410954 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6h4t\" (UniqueName: \"kubernetes.io/projected/14128854-7eae-4729-90c4-10370fde7337-kube-api-access-l6h4t\") pod \"packageserver-d55dfcdfc-fmh4j\" (UID: \"14128854-7eae-4729-90c4-10370fde7337\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410971 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7fdeda66-c9ae-4b65-8f49-c3f46a903e52-serving-cert\") pod \"console-operator-58897d9998-jjb74\" (UID: \"7fdeda66-c9ae-4b65-8f49-c3f46a903e52\") " pod="openshift-console-operator/console-operator-58897d9998-jjb74" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.410987 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ea6cf88e-bedf-4010-b737-0c93f2c4d4be-metrics-tls\") pod \"ingress-operator-5b745b69d9-4xtnb\" (UID: \"ea6cf88e-bedf-4010-b737-0c93f2c4d4be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4xtnb" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411025 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d3f0f887-4427-41fa-a495-470f6a1da8ae-registry-tls\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411044 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c58668af-88e5-4058-9571-5ce0f3fd7e9f-service-ca\") pod \"console-f9d7485db-2bgb9\" (UID: \"c58668af-88e5-4058-9571-5ce0f3fd7e9f\") " pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411060 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/7fdeda66-c9ae-4b65-8f49-c3f46a903e52-trusted-ca\") pod \"console-operator-58897d9998-jjb74\" (UID: \"7fdeda66-c9ae-4b65-8f49-c3f46a903e52\") " pod="openshift-console-operator/console-operator-58897d9998-jjb74" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411076 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/7ace7d0c-1b65-484b-9724-a03aded5ec7f-etcd-service-ca\") pod \"etcd-operator-b45778765-4bhjw\" (UID: \"7ace7d0c-1b65-484b-9724-a03aded5ec7f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411111 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhtdh\" (UniqueName: \"kubernetes.io/projected/fe084dfc-7335-4d94-8b7c-09637c52b19f-kube-api-access-xhtdh\") pod \"migrator-59844c95c7-9dggz\" (UID: \"fe084dfc-7335-4d94-8b7c-09637c52b19f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9dggz" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411126 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/a7093ca4-c09c-4031-ba6f-e7fc85890480-default-certificate\") pod \"router-default-5444994796-fvdhv\" (UID: \"a7093ca4-c09c-4031-ba6f-e7fc85890480\") " pod="openshift-ingress/router-default-5444994796-fvdhv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411143 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e019250-5bdc-4a0e-a1e9-ecaa947faa4e-config\") pod \"kube-apiserver-operator-766d6c64bb-ngf2g\" (UID: \"6e019250-5bdc-4a0e-a1e9-ecaa947faa4e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ngf2g" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411162 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fdeda66-c9ae-4b65-8f49-c3f46a903e52-config\") pod \"console-operator-58897d9998-jjb74\" (UID: \"7fdeda66-c9ae-4b65-8f49-c3f46a903e52\") " pod="openshift-console-operator/console-operator-58897d9998-jjb74" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411199 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d3f0f887-4427-41fa-a495-470f6a1da8ae-ca-trust-extracted\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411218 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a7093ca4-c09c-4031-ba6f-e7fc85890480-metrics-certs\") pod \"router-default-5444994796-fvdhv\" (UID: \"a7093ca4-c09c-4031-ba6f-e7fc85890480\") " pod="openshift-ingress/router-default-5444994796-fvdhv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411235 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c58668af-88e5-4058-9571-5ce0f3fd7e9f-console-oauth-config\") pod 
\"console-f9d7485db-2bgb9\" (UID: \"c58668af-88e5-4058-9571-5ce0f3fd7e9f\") " pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411252 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0203f99c-4bc4-4ebd-b17b-7f1f54f54315-srv-cert\") pod \"catalog-operator-68c6474976-clmbp\" (UID: \"0203f99c-4bc4-4ebd-b17b-7f1f54f54315\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-clmbp" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411269 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4d551409-85f7-4c8f-8144-64ac0bb1f155-config-volume\") pod \"collect-profiles-29495520-k6gh6\" (UID: \"4d551409-85f7-4c8f-8144-64ac0bb1f155\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495520-k6gh6" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411287 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b8533351-ed92-4278-89ed-8e3f31aecb20-metrics-tls\") pod \"dns-operator-744455d44c-8vr8f\" (UID: \"b8533351-ed92-4278-89ed-8e3f31aecb20\") " pod="openshift-dns-operator/dns-operator-744455d44c-8vr8f" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411315 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6e019250-5bdc-4a0e-a1e9-ecaa947faa4e-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-ngf2g\" (UID: \"6e019250-5bdc-4a0e-a1e9-ecaa947faa4e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ngf2g" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411332 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qg4qt\" (UniqueName: \"kubernetes.io/projected/ea6cf88e-bedf-4010-b737-0c93f2c4d4be-kube-api-access-qg4qt\") pod \"ingress-operator-5b745b69d9-4xtnb\" (UID: \"ea6cf88e-bedf-4010-b737-0c93f2c4d4be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4xtnb" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411367 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d3f0f887-4427-41fa-a495-470f6a1da8ae-bound-sa-token\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411383 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvhbh\" (UniqueName: \"kubernetes.io/projected/d3f0f887-4427-41fa-a495-470f6a1da8ae-kube-api-access-gvhbh\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411398 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e019250-5bdc-4a0e-a1e9-ecaa947faa4e-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-ngf2g\" (UID: \"6e019250-5bdc-4a0e-a1e9-ecaa947faa4e\") 
" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ngf2g" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411437 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/361af061-ac94-45f6-af48-9e6f0a5a89e1-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-qw447\" (UID: \"361af061-ac94-45f6-af48-9e6f0a5a89e1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qw447" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411471 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ea6cf88e-bedf-4010-b737-0c93f2c4d4be-trusted-ca\") pod \"ingress-operator-5b745b69d9-4xtnb\" (UID: \"ea6cf88e-bedf-4010-b737-0c93f2c4d4be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4xtnb" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411487 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjgwg\" (UniqueName: \"kubernetes.io/projected/7fdeda66-c9ae-4b65-8f49-c3f46a903e52-kube-api-access-rjgwg\") pod \"console-operator-58897d9998-jjb74\" (UID: \"7fdeda66-c9ae-4b65-8f49-c3f46a903e52\") " pod="openshift-console-operator/console-operator-58897d9998-jjb74" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411511 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/022b7871-8bf2-4432-9f33-d816fdd16fce-signing-cabundle\") pod \"service-ca-9c57cc56f-fkgqw\" (UID: \"022b7871-8bf2-4432-9f33-d816fdd16fce\") " pod="openshift-service-ca/service-ca-9c57cc56f-fkgqw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411542 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqjz7\" (UniqueName: \"kubernetes.io/projected/022b7871-8bf2-4432-9f33-d816fdd16fce-kube-api-access-bqjz7\") pod \"service-ca-9c57cc56f-fkgqw\" (UID: \"022b7871-8bf2-4432-9f33-d816fdd16fce\") " pod="openshift-service-ca/service-ca-9c57cc56f-fkgqw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411570 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s55g7\" (UniqueName: \"kubernetes.io/projected/0203f99c-4bc4-4ebd-b17b-7f1f54f54315-kube-api-access-s55g7\") pod \"catalog-operator-68c6474976-clmbp\" (UID: \"0203f99c-4bc4-4ebd-b17b-7f1f54f54315\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-clmbp" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411587 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c58668af-88e5-4058-9571-5ce0f3fd7e9f-console-config\") pod \"console-f9d7485db-2bgb9\" (UID: \"c58668af-88e5-4058-9571-5ce0f3fd7e9f\") " pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411622 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e053140-7618-4580-8899-a121dd6759f8-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-nm4dq\" (UID: \"1e053140-7618-4580-8899-a121dd6759f8\") " 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-nm4dq" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411637 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/14128854-7eae-4729-90c4-10370fde7337-apiservice-cert\") pod \"packageserver-d55dfcdfc-fmh4j\" (UID: \"14128854-7eae-4729-90c4-10370fde7337\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411656 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f617943-c63f-4006-907c-dc2584eac526-config\") pod \"service-ca-operator-777779d784-67ddr\" (UID: \"5f617943-c63f-4006-907c-dc2584eac526\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-67ddr" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411671 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e053140-7618-4580-8899-a121dd6759f8-config\") pod \"openshift-apiserver-operator-796bbdcf4f-nm4dq\" (UID: \"1e053140-7618-4580-8899-a121dd6759f8\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-nm4dq" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411695 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7ace7d0c-1b65-484b-9724-a03aded5ec7f-etcd-client\") pod \"etcd-operator-b45778765-4bhjw\" (UID: \"7ace7d0c-1b65-484b-9724-a03aded5ec7f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411713 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c216d7c-42b5-45fb-a68d-8e38d622978f-config-volume\") pod \"dns-default-f5g22\" (UID: \"9c216d7c-42b5-45fb-a68d-8e38d622978f\") " pod="openshift-dns/dns-default-f5g22" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411732 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d3f0f887-4427-41fa-a495-470f6a1da8ae-registry-certificates\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411748 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/14128854-7eae-4729-90c4-10370fde7337-webhook-cert\") pod \"packageserver-d55dfcdfc-fmh4j\" (UID: \"14128854-7eae-4729-90c4-10370fde7337\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411770 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a7093ca4-c09c-4031-ba6f-e7fc85890480-service-ca-bundle\") pod \"router-default-5444994796-fvdhv\" (UID: \"a7093ca4-c09c-4031-ba6f-e7fc85890480\") " pod="openshift-ingress/router-default-5444994796-fvdhv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 
00:10:55.411814 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fbbj\" (UniqueName: \"kubernetes.io/projected/4d551409-85f7-4c8f-8144-64ac0bb1f155-kube-api-access-4fbbj\") pod \"collect-profiles-29495520-k6gh6\" (UID: \"4d551409-85f7-4c8f-8144-64ac0bb1f155\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495520-k6gh6" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411850 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4d551409-85f7-4c8f-8144-64ac0bb1f155-secret-volume\") pod \"collect-profiles-29495520-k6gh6\" (UID: \"4d551409-85f7-4c8f-8144-64ac0bb1f155\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495520-k6gh6" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411882 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c58668af-88e5-4058-9571-5ce0f3fd7e9f-console-serving-cert\") pod \"console-f9d7485db-2bgb9\" (UID: \"c58668af-88e5-4058-9571-5ce0f3fd7e9f\") " pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.411901 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9w8d8\" (UniqueName: \"kubernetes.io/projected/4b65245c-0e70-4a17-b739-9c08059b07dc-kube-api-access-9w8d8\") pod \"machine-config-controller-84d6567774-kbjfj\" (UID: \"4b65245c-0e70-4a17-b739-9c08059b07dc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kbjfj" Jan 30 00:10:55 crc kubenswrapper[4885]: E0130 00:10:55.416652 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:10:55.916634774 +0000 UTC m=+142.508106732 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:55 crc kubenswrapper[4885]: W0130 00:10:55.470312 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc59baa8a_ba27_4ef6_9d63_a0a25b597f7e.slice/crio-43b4a4ac1de95958d0cbf74cc941926ee82f3928958882a5d089446522eb360a WatchSource:0}: Error finding container 43b4a4ac1de95958d0cbf74cc941926ee82f3928958882a5d089446522eb360a: Status 404 returned error can't find the container with id 43b4a4ac1de95958d0cbf74cc941926ee82f3928958882a5d089446522eb360a Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.473194 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-7hvjx"] Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.513226 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.513479 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9w8d8\" (UniqueName: \"kubernetes.io/projected/4b65245c-0e70-4a17-b739-9c08059b07dc-kube-api-access-9w8d8\") pod \"machine-config-controller-84d6567774-kbjfj\" (UID: \"4b65245c-0e70-4a17-b739-9c08059b07dc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kbjfj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.513531 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/94a6782c-15c8-43c6-a4f2-6c297ba52df0-registration-dir\") pod \"csi-hostpathplugin-tmtj2\" (UID: \"94a6782c-15c8-43c6-a4f2-6c297ba52df0\") " pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.513567 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4b65245c-0e70-4a17-b739-9c08059b07dc-proxy-tls\") pod \"machine-config-controller-84d6567774-kbjfj\" (UID: \"4b65245c-0e70-4a17-b739-9c08059b07dc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kbjfj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.513627 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvcxh\" (UniqueName: \"kubernetes.io/projected/c58668af-88e5-4058-9571-5ce0f3fd7e9f-kube-api-access-jvcxh\") pod \"console-f9d7485db-2bgb9\" (UID: \"c58668af-88e5-4058-9571-5ce0f3fd7e9f\") " pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.513704 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c58668af-88e5-4058-9571-5ce0f3fd7e9f-oauth-serving-cert\") 
pod \"console-f9d7485db-2bgb9\" (UID: \"c58668af-88e5-4058-9571-5ce0f3fd7e9f\") " pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.513729 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f40ec490-af02-4935-bb20-3698b71fce88-srv-cert\") pod \"olm-operator-6b444d44fb-8r5gv\" (UID: \"f40ec490-af02-4935-bb20-3698b71fce88\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8r5gv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.513781 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f40ec490-af02-4935-bb20-3698b71fce88-profile-collector-cert\") pod \"olm-operator-6b444d44fb-8r5gv\" (UID: \"f40ec490-af02-4935-bb20-3698b71fce88\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8r5gv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.513954 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxjkf\" (UniqueName: \"kubernetes.io/projected/a345d711-84e3-47c8-a255-f833dfaca7fa-kube-api-access-wxjkf\") pod \"marketplace-operator-79b997595-mfxlj\" (UID: \"a345d711-84e3-47c8-a255-f833dfaca7fa\") " pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.514007 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/7ace7d0c-1b65-484b-9724-a03aded5ec7f-etcd-ca\") pod \"etcd-operator-b45778765-4bhjw\" (UID: \"7ace7d0c-1b65-484b-9724-a03aded5ec7f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.514034 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ea6cf88e-bedf-4010-b737-0c93f2c4d4be-bound-sa-token\") pod \"ingress-operator-5b745b69d9-4xtnb\" (UID: \"ea6cf88e-bedf-4010-b737-0c93f2c4d4be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4xtnb" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.514074 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-722r9\" (UniqueName: \"kubernetes.io/projected/9c216d7c-42b5-45fb-a68d-8e38d622978f-kube-api-access-722r9\") pod \"dns-default-f5g22\" (UID: \"9c216d7c-42b5-45fb-a68d-8e38d622978f\") " pod="openshift-dns/dns-default-f5g22" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.514102 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0203f99c-4bc4-4ebd-b17b-7f1f54f54315-profile-collector-cert\") pod \"catalog-operator-68c6474976-clmbp\" (UID: \"0203f99c-4bc4-4ebd-b17b-7f1f54f54315\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-clmbp" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.514130 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/a7093ca4-c09c-4031-ba6f-e7fc85890480-stats-auth\") pod \"router-default-5444994796-fvdhv\" (UID: \"a7093ca4-c09c-4031-ba6f-e7fc85890480\") " pod="openshift-ingress/router-default-5444994796-fvdhv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.514205 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-bv77v\" (UniqueName: \"kubernetes.io/projected/1e053140-7618-4580-8899-a121dd6759f8-kube-api-access-bv77v\") pod \"openshift-apiserver-operator-796bbdcf4f-nm4dq\" (UID: \"1e053140-7618-4580-8899-a121dd6759f8\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-nm4dq" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.514261 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a345d711-84e3-47c8-a255-f833dfaca7fa-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mfxlj\" (UID: \"a345d711-84e3-47c8-a255-f833dfaca7fa\") " pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.514331 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d3f0f887-4427-41fa-a495-470f6a1da8ae-installation-pull-secrets\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.514356 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/91f35d98-710b-4439-9c13-91f00f6646c7-cert\") pod \"ingress-canary-6q2hj\" (UID: \"91f35d98-710b-4439-9c13-91f00f6646c7\") " pod="openshift-ingress-canary/ingress-canary-6q2hj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.514412 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4b65245c-0e70-4a17-b739-9c08059b07dc-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-kbjfj\" (UID: \"4b65245c-0e70-4a17-b739-9c08059b07dc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kbjfj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.514439 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwnvf\" (UniqueName: \"kubernetes.io/projected/7ace7d0c-1b65-484b-9724-a03aded5ec7f-kube-api-access-bwnvf\") pod \"etcd-operator-b45778765-4bhjw\" (UID: \"7ace7d0c-1b65-484b-9724-a03aded5ec7f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.514463 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a345d711-84e3-47c8-a255-f833dfaca7fa-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mfxlj\" (UID: \"a345d711-84e3-47c8-a255-f833dfaca7fa\") " pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.514505 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stlkn\" (UniqueName: \"kubernetes.io/projected/a7093ca4-c09c-4031-ba6f-e7fc85890480-kube-api-access-stlkn\") pod \"router-default-5444994796-fvdhv\" (UID: \"a7093ca4-c09c-4031-ba6f-e7fc85890480\") " pod="openshift-ingress/router-default-5444994796-fvdhv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.514541 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/361af061-ac94-45f6-af48-9e6f0a5a89e1-serving-cert\") pod 
\"kube-storage-version-migrator-operator-b67b599dd-qw447\" (UID: \"361af061-ac94-45f6-af48-9e6f0a5a89e1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qw447" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.514603 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7ace7d0c-1b65-484b-9724-a03aded5ec7f-serving-cert\") pod \"etcd-operator-b45778765-4bhjw\" (UID: \"7ace7d0c-1b65-484b-9724-a03aded5ec7f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.514675 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9c216d7c-42b5-45fb-a68d-8e38d622978f-metrics-tls\") pod \"dns-default-f5g22\" (UID: \"9c216d7c-42b5-45fb-a68d-8e38d622978f\") " pod="openshift-dns/dns-default-f5g22" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.514703 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scjzj\" (UniqueName: \"kubernetes.io/projected/5f617943-c63f-4006-907c-dc2584eac526-kube-api-access-scjzj\") pod \"service-ca-operator-777779d784-67ddr\" (UID: \"5f617943-c63f-4006-907c-dc2584eac526\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-67ddr" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.514746 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/94a6782c-15c8-43c6-a4f2-6c297ba52df0-socket-dir\") pod \"csi-hostpathplugin-tmtj2\" (UID: \"94a6782c-15c8-43c6-a4f2-6c297ba52df0\") " pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.514848 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/14128854-7eae-4729-90c4-10370fde7337-tmpfs\") pod \"packageserver-d55dfcdfc-fmh4j\" (UID: \"14128854-7eae-4729-90c4-10370fde7337\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.514876 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72tlm\" (UniqueName: \"kubernetes.io/projected/b8533351-ed92-4278-89ed-8e3f31aecb20-kube-api-access-72tlm\") pod \"dns-operator-744455d44c-8vr8f\" (UID: \"b8533351-ed92-4278-89ed-8e3f31aecb20\") " pod="openshift-dns-operator/dns-operator-744455d44c-8vr8f" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.514937 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5f617943-c63f-4006-907c-dc2584eac526-serving-cert\") pod \"service-ca-operator-777779d784-67ddr\" (UID: \"5f617943-c63f-4006-907c-dc2584eac526\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-67ddr" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.514959 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxgrc\" (UniqueName: \"kubernetes.io/projected/361af061-ac94-45f6-af48-9e6f0a5a89e1-kube-api-access-kxgrc\") pod \"kube-storage-version-migrator-operator-b67b599dd-qw447\" (UID: \"361af061-ac94-45f6-af48-9e6f0a5a89e1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qw447" Jan 
30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.515039 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ace7d0c-1b65-484b-9724-a03aded5ec7f-config\") pod \"etcd-operator-b45778765-4bhjw\" (UID: \"7ace7d0c-1b65-484b-9724-a03aded5ec7f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.515097 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhk4q\" (UniqueName: \"kubernetes.io/projected/f40ec490-af02-4935-bb20-3698b71fce88-kube-api-access-fhk4q\") pod \"olm-operator-6b444d44fb-8r5gv\" (UID: \"f40ec490-af02-4935-bb20-3698b71fce88\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8r5gv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.515120 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/022b7871-8bf2-4432-9f33-d816fdd16fce-signing-key\") pod \"service-ca-9c57cc56f-fkgqw\" (UID: \"022b7871-8bf2-4432-9f33-d816fdd16fce\") " pod="openshift-service-ca/service-ca-9c57cc56f-fkgqw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.515161 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c58668af-88e5-4058-9571-5ce0f3fd7e9f-trusted-ca-bundle\") pod \"console-f9d7485db-2bgb9\" (UID: \"c58668af-88e5-4058-9571-5ce0f3fd7e9f\") " pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.515184 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/94a6782c-15c8-43c6-a4f2-6c297ba52df0-plugins-dir\") pod \"csi-hostpathplugin-tmtj2\" (UID: \"94a6782c-15c8-43c6-a4f2-6c297ba52df0\") " pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.515264 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d3f0f887-4427-41fa-a495-470f6a1da8ae-trusted-ca\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.515288 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6h4t\" (UniqueName: \"kubernetes.io/projected/14128854-7eae-4729-90c4-10370fde7337-kube-api-access-l6h4t\") pod \"packageserver-d55dfcdfc-fmh4j\" (UID: \"14128854-7eae-4729-90c4-10370fde7337\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.515343 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7fdeda66-c9ae-4b65-8f49-c3f46a903e52-serving-cert\") pod \"console-operator-58897d9998-jjb74\" (UID: \"7fdeda66-c9ae-4b65-8f49-c3f46a903e52\") " pod="openshift-console-operator/console-operator-58897d9998-jjb74" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.515367 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d3f0f887-4427-41fa-a495-470f6a1da8ae-registry-tls\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: 
\"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.515410 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c58668af-88e5-4058-9571-5ce0f3fd7e9f-service-ca\") pod \"console-f9d7485db-2bgb9\" (UID: \"c58668af-88e5-4058-9571-5ce0f3fd7e9f\") " pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.515433 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7fdeda66-c9ae-4b65-8f49-c3f46a903e52-trusted-ca\") pod \"console-operator-58897d9998-jjb74\" (UID: \"7fdeda66-c9ae-4b65-8f49-c3f46a903e52\") " pod="openshift-console-operator/console-operator-58897d9998-jjb74" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.515455 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ea6cf88e-bedf-4010-b737-0c93f2c4d4be-metrics-tls\") pod \"ingress-operator-5b745b69d9-4xtnb\" (UID: \"ea6cf88e-bedf-4010-b737-0c93f2c4d4be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4xtnb" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.515529 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/7ace7d0c-1b65-484b-9724-a03aded5ec7f-etcd-service-ca\") pod \"etcd-operator-b45778765-4bhjw\" (UID: \"7ace7d0c-1b65-484b-9724-a03aded5ec7f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.515576 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhtdh\" (UniqueName: \"kubernetes.io/projected/fe084dfc-7335-4d94-8b7c-09637c52b19f-kube-api-access-xhtdh\") pod \"migrator-59844c95c7-9dggz\" (UID: \"fe084dfc-7335-4d94-8b7c-09637c52b19f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9dggz" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.515600 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/a7093ca4-c09c-4031-ba6f-e7fc85890480-default-certificate\") pod \"router-default-5444994796-fvdhv\" (UID: \"a7093ca4-c09c-4031-ba6f-e7fc85890480\") " pod="openshift-ingress/router-default-5444994796-fvdhv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.516720 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/7ace7d0c-1b65-484b-9724-a03aded5ec7f-etcd-ca\") pod \"etcd-operator-b45778765-4bhjw\" (UID: \"7ace7d0c-1b65-484b-9724-a03aded5ec7f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" Jan 30 00:10:55 crc kubenswrapper[4885]: E0130 00:10:55.516894 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:10:56.01687498 +0000 UTC m=+142.608346738 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.518809 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptjps\" (UniqueName: \"kubernetes.io/projected/91f35d98-710b-4439-9c13-91f00f6646c7-kube-api-access-ptjps\") pod \"ingress-canary-6q2hj\" (UID: \"91f35d98-710b-4439-9c13-91f00f6646c7\") " pod="openshift-ingress-canary/ingress-canary-6q2hj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.518864 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdcb9\" (UniqueName: \"kubernetes.io/projected/d6df145a-2f02-4834-bb15-07a9b4e70784-kube-api-access-sdcb9\") pod \"machine-config-server-92xfx\" (UID: \"d6df145a-2f02-4834-bb15-07a9b4e70784\") " pod="openshift-machine-config-operator/machine-config-server-92xfx" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.518889 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e019250-5bdc-4a0e-a1e9-ecaa947faa4e-config\") pod \"kube-apiserver-operator-766d6c64bb-ngf2g\" (UID: \"6e019250-5bdc-4a0e-a1e9-ecaa947faa4e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ngf2g" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.518934 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fdeda66-c9ae-4b65-8f49-c3f46a903e52-config\") pod \"console-operator-58897d9998-jjb74\" (UID: \"7fdeda66-c9ae-4b65-8f49-c3f46a903e52\") " pod="openshift-console-operator/console-operator-58897d9998-jjb74" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.518970 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d3f0f887-4427-41fa-a495-470f6a1da8ae-ca-trust-extracted\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.519564 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/14128854-7eae-4729-90c4-10370fde7337-tmpfs\") pod \"packageserver-d55dfcdfc-fmh4j\" (UID: \"14128854-7eae-4729-90c4-10370fde7337\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.521046 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/c58668af-88e5-4058-9571-5ce0f3fd7e9f-oauth-serving-cert\") pod \"console-f9d7485db-2bgb9\" (UID: \"c58668af-88e5-4058-9571-5ce0f3fd7e9f\") " pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.522968 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/361af061-ac94-45f6-af48-9e6f0a5a89e1-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-qw447\" (UID: \"361af061-ac94-45f6-af48-9e6f0a5a89e1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qw447" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.521625 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a7093ca4-c09c-4031-ba6f-e7fc85890480-metrics-certs\") pod \"router-default-5444994796-fvdhv\" (UID: \"a7093ca4-c09c-4031-ba6f-e7fc85890480\") " pod="openshift-ingress/router-default-5444994796-fvdhv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.523210 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c58668af-88e5-4058-9571-5ce0f3fd7e9f-console-oauth-config\") pod \"console-f9d7485db-2bgb9\" (UID: \"c58668af-88e5-4058-9571-5ce0f3fd7e9f\") " pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.523306 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0203f99c-4bc4-4ebd-b17b-7f1f54f54315-srv-cert\") pod \"catalog-operator-68c6474976-clmbp\" (UID: \"0203f99c-4bc4-4ebd-b17b-7f1f54f54315\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-clmbp" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.523458 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4d551409-85f7-4c8f-8144-64ac0bb1f155-config-volume\") pod \"collect-profiles-29495520-k6gh6\" (UID: \"4d551409-85f7-4c8f-8144-64ac0bb1f155\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495520-k6gh6" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.523572 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b8533351-ed92-4278-89ed-8e3f31aecb20-metrics-tls\") pod \"dns-operator-744455d44c-8vr8f\" (UID: \"b8533351-ed92-4278-89ed-8e3f31aecb20\") " pod="openshift-dns-operator/dns-operator-744455d44c-8vr8f" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.523672 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/94a6782c-15c8-43c6-a4f2-6c297ba52df0-csi-data-dir\") pod \"csi-hostpathplugin-tmtj2\" (UID: \"94a6782c-15c8-43c6-a4f2-6c297ba52df0\") " pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.523835 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6e019250-5bdc-4a0e-a1e9-ecaa947faa4e-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-ngf2g\" (UID: \"6e019250-5bdc-4a0e-a1e9-ecaa947faa4e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ngf2g" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.523937 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qg4qt\" (UniqueName: \"kubernetes.io/projected/ea6cf88e-bedf-4010-b737-0c93f2c4d4be-kube-api-access-qg4qt\") pod \"ingress-operator-5b745b69d9-4xtnb\" (UID: \"ea6cf88e-bedf-4010-b737-0c93f2c4d4be\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4xtnb" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.524135 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4b65245c-0e70-4a17-b739-9c08059b07dc-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-kbjfj\" (UID: \"4b65245c-0e70-4a17-b739-9c08059b07dc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kbjfj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.523095 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7ace7d0c-1b65-484b-9724-a03aded5ec7f-serving-cert\") pod \"etcd-operator-b45778765-4bhjw\" (UID: \"7ace7d0c-1b65-484b-9724-a03aded5ec7f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.524672 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5f617943-c63f-4006-907c-dc2584eac526-serving-cert\") pod \"service-ca-operator-777779d784-67ddr\" (UID: \"5f617943-c63f-4006-907c-dc2584eac526\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-67ddr" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.521603 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f40ec490-af02-4935-bb20-3698b71fce88-profile-collector-cert\") pod \"olm-operator-6b444d44fb-8r5gv\" (UID: \"f40ec490-af02-4935-bb20-3698b71fce88\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8r5gv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.525503 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ace7d0c-1b65-484b-9724-a03aded5ec7f-config\") pod \"etcd-operator-b45778765-4bhjw\" (UID: \"7ace7d0c-1b65-484b-9724-a03aded5ec7f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.525684 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2f8ww"] Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.522024 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0203f99c-4bc4-4ebd-b17b-7f1f54f54315-profile-collector-cert\") pod \"catalog-operator-68c6474976-clmbp\" (UID: \"0203f99c-4bc4-4ebd-b17b-7f1f54f54315\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-clmbp" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.527677 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d3f0f887-4427-41fa-a495-470f6a1da8ae-installation-pull-secrets\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.528069 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4d551409-85f7-4c8f-8144-64ac0bb1f155-config-volume\") pod \"collect-profiles-29495520-k6gh6\" (UID: \"4d551409-85f7-4c8f-8144-64ac0bb1f155\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29495520-k6gh6" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.529087 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d3f0f887-4427-41fa-a495-470f6a1da8ae-trusted-ca\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.529584 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/c58668af-88e5-4058-9571-5ce0f3fd7e9f-service-ca\") pod \"console-f9d7485db-2bgb9\" (UID: \"c58668af-88e5-4058-9571-5ce0f3fd7e9f\") " pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.529571 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a345d711-84e3-47c8-a255-f833dfaca7fa-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mfxlj\" (UID: \"a345d711-84e3-47c8-a255-f833dfaca7fa\") " pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.529946 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/c58668af-88e5-4058-9571-5ce0f3fd7e9f-console-oauth-config\") pod \"console-f9d7485db-2bgb9\" (UID: \"c58668af-88e5-4058-9571-5ce0f3fd7e9f\") " pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.530005 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4b65245c-0e70-4a17-b739-9c08059b07dc-proxy-tls\") pod \"machine-config-controller-84d6567774-kbjfj\" (UID: \"4b65245c-0e70-4a17-b739-9c08059b07dc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kbjfj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.530038 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9c216d7c-42b5-45fb-a68d-8e38d622978f-metrics-tls\") pod \"dns-default-f5g22\" (UID: \"9c216d7c-42b5-45fb-a68d-8e38d622978f\") " pod="openshift-dns/dns-default-f5g22" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.530105 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ea6cf88e-bedf-4010-b737-0c93f2c4d4be-metrics-tls\") pod \"ingress-operator-5b745b69d9-4xtnb\" (UID: \"ea6cf88e-bedf-4010-b737-0c93f2c4d4be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4xtnb" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.530452 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d3f0f887-4427-41fa-a495-470f6a1da8ae-ca-trust-extracted\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.530540 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f40ec490-af02-4935-bb20-3698b71fce88-srv-cert\") pod \"olm-operator-6b444d44fb-8r5gv\" (UID: 
\"f40ec490-af02-4935-bb20-3698b71fce88\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8r5gv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.530949 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/d6df145a-2f02-4834-bb15-07a9b4e70784-certs\") pod \"machine-config-server-92xfx\" (UID: \"d6df145a-2f02-4834-bb15-07a9b4e70784\") " pod="openshift-machine-config-operator/machine-config-server-92xfx" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.550928 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d3f0f887-4427-41fa-a495-470f6a1da8ae-bound-sa-token\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.550968 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvhbh\" (UniqueName: \"kubernetes.io/projected/d3f0f887-4427-41fa-a495-470f6a1da8ae-kube-api-access-gvhbh\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.550994 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e019250-5bdc-4a0e-a1e9-ecaa947faa4e-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-ngf2g\" (UID: \"6e019250-5bdc-4a0e-a1e9-ecaa947faa4e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ngf2g" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.551018 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/361af061-ac94-45f6-af48-9e6f0a5a89e1-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-qw447\" (UID: \"361af061-ac94-45f6-af48-9e6f0a5a89e1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qw447" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.551048 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ea6cf88e-bedf-4010-b737-0c93f2c4d4be-trusted-ca\") pod \"ingress-operator-5b745b69d9-4xtnb\" (UID: \"ea6cf88e-bedf-4010-b737-0c93f2c4d4be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4xtnb" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.551085 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjgwg\" (UniqueName: \"kubernetes.io/projected/7fdeda66-c9ae-4b65-8f49-c3f46a903e52-kube-api-access-rjgwg\") pod \"console-operator-58897d9998-jjb74\" (UID: \"7fdeda66-c9ae-4b65-8f49-c3f46a903e52\") " pod="openshift-console-operator/console-operator-58897d9998-jjb74" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.551111 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d6df145a-2f02-4834-bb15-07a9b4e70784-node-bootstrap-token\") pod \"machine-config-server-92xfx\" (UID: \"d6df145a-2f02-4834-bb15-07a9b4e70784\") " pod="openshift-machine-config-operator/machine-config-server-92xfx" Jan 30 00:10:55 crc 
kubenswrapper[4885]: I0130 00:10:55.551144 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/022b7871-8bf2-4432-9f33-d816fdd16fce-signing-cabundle\") pod \"service-ca-9c57cc56f-fkgqw\" (UID: \"022b7871-8bf2-4432-9f33-d816fdd16fce\") " pod="openshift-service-ca/service-ca-9c57cc56f-fkgqw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.551165 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqjz7\" (UniqueName: \"kubernetes.io/projected/022b7871-8bf2-4432-9f33-d816fdd16fce-kube-api-access-bqjz7\") pod \"service-ca-9c57cc56f-fkgqw\" (UID: \"022b7871-8bf2-4432-9f33-d816fdd16fce\") " pod="openshift-service-ca/service-ca-9c57cc56f-fkgqw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.551189 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4b84\" (UniqueName: \"kubernetes.io/projected/94a6782c-15c8-43c6-a4f2-6c297ba52df0-kube-api-access-s4b84\") pod \"csi-hostpathplugin-tmtj2\" (UID: \"94a6782c-15c8-43c6-a4f2-6c297ba52df0\") " pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.551212 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s55g7\" (UniqueName: \"kubernetes.io/projected/0203f99c-4bc4-4ebd-b17b-7f1f54f54315-kube-api-access-s55g7\") pod \"catalog-operator-68c6474976-clmbp\" (UID: \"0203f99c-4bc4-4ebd-b17b-7f1f54f54315\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-clmbp" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.551249 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e053140-7618-4580-8899-a121dd6759f8-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-nm4dq\" (UID: \"1e053140-7618-4580-8899-a121dd6759f8\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-nm4dq" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.551271 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c58668af-88e5-4058-9571-5ce0f3fd7e9f-console-config\") pod \"console-f9d7485db-2bgb9\" (UID: \"c58668af-88e5-4058-9571-5ce0f3fd7e9f\") " pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.551299 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f617943-c63f-4006-907c-dc2584eac526-config\") pod \"service-ca-operator-777779d784-67ddr\" (UID: \"5f617943-c63f-4006-907c-dc2584eac526\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-67ddr" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.551322 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e053140-7618-4580-8899-a121dd6759f8-config\") pod \"openshift-apiserver-operator-796bbdcf4f-nm4dq\" (UID: \"1e053140-7618-4580-8899-a121dd6759f8\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-nm4dq" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.551349 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/14128854-7eae-4729-90c4-10370fde7337-apiservice-cert\") pod \"packageserver-d55dfcdfc-fmh4j\" (UID: \"14128854-7eae-4729-90c4-10370fde7337\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.551448 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7ace7d0c-1b65-484b-9724-a03aded5ec7f-etcd-client\") pod \"etcd-operator-b45778765-4bhjw\" (UID: \"7ace7d0c-1b65-484b-9724-a03aded5ec7f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.551496 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/94a6782c-15c8-43c6-a4f2-6c297ba52df0-mountpoint-dir\") pod \"csi-hostpathplugin-tmtj2\" (UID: \"94a6782c-15c8-43c6-a4f2-6c297ba52df0\") " pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.551538 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c216d7c-42b5-45fb-a68d-8e38d622978f-config-volume\") pod \"dns-default-f5g22\" (UID: \"9c216d7c-42b5-45fb-a68d-8e38d622978f\") " pod="openshift-dns/dns-default-f5g22" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.551585 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d3f0f887-4427-41fa-a495-470f6a1da8ae-registry-certificates\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.551607 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/14128854-7eae-4729-90c4-10370fde7337-webhook-cert\") pod \"packageserver-d55dfcdfc-fmh4j\" (UID: \"14128854-7eae-4729-90c4-10370fde7337\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.551661 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a7093ca4-c09c-4031-ba6f-e7fc85890480-service-ca-bundle\") pod \"router-default-5444994796-fvdhv\" (UID: \"a7093ca4-c09c-4031-ba6f-e7fc85890480\") " pod="openshift-ingress/router-default-5444994796-fvdhv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.551745 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fbbj\" (UniqueName: \"kubernetes.io/projected/4d551409-85f7-4c8f-8144-64ac0bb1f155-kube-api-access-4fbbj\") pod \"collect-profiles-29495520-k6gh6\" (UID: \"4d551409-85f7-4c8f-8144-64ac0bb1f155\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495520-k6gh6" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.551892 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4d551409-85f7-4c8f-8144-64ac0bb1f155-secret-volume\") pod \"collect-profiles-29495520-k6gh6\" (UID: \"4d551409-85f7-4c8f-8144-64ac0bb1f155\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495520-k6gh6" Jan 30 00:10:55 crc kubenswrapper[4885]: 
I0130 00:10:55.551925 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c58668af-88e5-4058-9571-5ce0f3fd7e9f-console-serving-cert\") pod \"console-f9d7485db-2bgb9\" (UID: \"c58668af-88e5-4058-9571-5ce0f3fd7e9f\") " pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.552758 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a7093ca4-c09c-4031-ba6f-e7fc85890480-metrics-certs\") pod \"router-default-5444994796-fvdhv\" (UID: \"a7093ca4-c09c-4031-ba6f-e7fc85890480\") " pod="openshift-ingress/router-default-5444994796-fvdhv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.544626 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/022b7871-8bf2-4432-9f33-d816fdd16fce-signing-key\") pod \"service-ca-9c57cc56f-fkgqw\" (UID: \"022b7871-8bf2-4432-9f33-d816fdd16fce\") " pod="openshift-service-ca/service-ca-9c57cc56f-fkgqw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.531227 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7fdeda66-c9ae-4b65-8f49-c3f46a903e52-trusted-ca\") pod \"console-operator-58897d9998-jjb74\" (UID: \"7fdeda66-c9ae-4b65-8f49-c3f46a903e52\") " pod="openshift-console-operator/console-operator-58897d9998-jjb74" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.532347 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e019250-5bdc-4a0e-a1e9-ecaa947faa4e-config\") pod \"kube-apiserver-operator-766d6c64bb-ngf2g\" (UID: \"6e019250-5bdc-4a0e-a1e9-ecaa947faa4e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ngf2g" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.533077 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7fdeda66-c9ae-4b65-8f49-c3f46a903e52-serving-cert\") pod \"console-operator-58897d9998-jjb74\" (UID: \"7fdeda66-c9ae-4b65-8f49-c3f46a903e52\") " pod="openshift-console-operator/console-operator-58897d9998-jjb74" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.555228 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/c58668af-88e5-4058-9571-5ce0f3fd7e9f-console-serving-cert\") pod \"console-f9d7485db-2bgb9\" (UID: \"c58668af-88e5-4058-9571-5ce0f3fd7e9f\") " pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.545487 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/a7093ca4-c09c-4031-ba6f-e7fc85890480-stats-auth\") pod \"router-default-5444994796-fvdhv\" (UID: \"a7093ca4-c09c-4031-ba6f-e7fc85890480\") " pod="openshift-ingress/router-default-5444994796-fvdhv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.549326 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/7ace7d0c-1b65-484b-9724-a03aded5ec7f-etcd-service-ca\") pod \"etcd-operator-b45778765-4bhjw\" (UID: \"7ace7d0c-1b65-484b-9724-a03aded5ec7f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" Jan 30 00:10:55 crc kubenswrapper[4885]: 
I0130 00:10:55.558282 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/c58668af-88e5-4058-9571-5ce0f3fd7e9f-console-config\") pod \"console-f9d7485db-2bgb9\" (UID: \"c58668af-88e5-4058-9571-5ce0f3fd7e9f\") " pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.558601 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f617943-c63f-4006-907c-dc2584eac526-config\") pod \"service-ca-operator-777779d784-67ddr\" (UID: \"5f617943-c63f-4006-907c-dc2584eac526\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-67ddr" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.559236 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e053140-7618-4580-8899-a121dd6759f8-config\") pod \"openshift-apiserver-operator-796bbdcf4f-nm4dq\" (UID: \"1e053140-7618-4580-8899-a121dd6759f8\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-nm4dq" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.559517 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ea6cf88e-bedf-4010-b737-0c93f2c4d4be-bound-sa-token\") pod \"ingress-operator-5b745b69d9-4xtnb\" (UID: \"ea6cf88e-bedf-4010-b737-0c93f2c4d4be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4xtnb" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.560808 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c58668af-88e5-4058-9571-5ce0f3fd7e9f-trusted-ca-bundle\") pod \"console-f9d7485db-2bgb9\" (UID: \"c58668af-88e5-4058-9571-5ce0f3fd7e9f\") " pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.540152 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a345d711-84e3-47c8-a255-f833dfaca7fa-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mfxlj\" (UID: \"a345d711-84e3-47c8-a255-f833dfaca7fa\") " pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.533400 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0203f99c-4bc4-4ebd-b17b-7f1f54f54315-srv-cert\") pod \"catalog-operator-68c6474976-clmbp\" (UID: \"0203f99c-4bc4-4ebd-b17b-7f1f54f54315\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-clmbp" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.561303 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e053140-7618-4580-8899-a121dd6759f8-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-nm4dq\" (UID: \"1e053140-7618-4580-8899-a121dd6759f8\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-nm4dq" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.562002 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a7093ca4-c09c-4031-ba6f-e7fc85890480-service-ca-bundle\") pod \"router-default-5444994796-fvdhv\" (UID: \"a7093ca4-c09c-4031-ba6f-e7fc85890480\") " 
pod="openshift-ingress/router-default-5444994796-fvdhv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.542993 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8skch"] Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.565207 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/361af061-ac94-45f6-af48-9e6f0a5a89e1-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-qw447\" (UID: \"361af061-ac94-45f6-af48-9e6f0a5a89e1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qw447" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.533946 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fdeda66-c9ae-4b65-8f49-c3f46a903e52-config\") pod \"console-operator-58897d9998-jjb74\" (UID: \"7fdeda66-c9ae-4b65-8f49-c3f46a903e52\") " pod="openshift-console-operator/console-operator-58897d9998-jjb74" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.565287 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c216d7c-42b5-45fb-a68d-8e38d622978f-config-volume\") pod \"dns-default-f5g22\" (UID: \"9c216d7c-42b5-45fb-a68d-8e38d622978f\") " pod="openshift-dns/dns-default-f5g22" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.539639 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d3f0f887-4427-41fa-a495-470f6a1da8ae-registry-tls\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.544296 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b8533351-ed92-4278-89ed-8e3f31aecb20-metrics-tls\") pod \"dns-operator-744455d44c-8vr8f\" (UID: \"b8533351-ed92-4278-89ed-8e3f31aecb20\") " pod="openshift-dns-operator/dns-operator-744455d44c-8vr8f" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.544542 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/a7093ca4-c09c-4031-ba6f-e7fc85890480-default-certificate\") pod \"router-default-5444994796-fvdhv\" (UID: \"a7093ca4-c09c-4031-ba6f-e7fc85890480\") " pod="openshift-ingress/router-default-5444994796-fvdhv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.566574 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e019250-5bdc-4a0e-a1e9-ecaa947faa4e-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-ngf2g\" (UID: \"6e019250-5bdc-4a0e-a1e9-ecaa947faa4e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ngf2g" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.566994 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d3f0f887-4427-41fa-a495-470f6a1da8ae-registry-certificates\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.568013 4885 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7ace7d0c-1b65-484b-9724-a03aded5ec7f-etcd-client\") pod \"etcd-operator-b45778765-4bhjw\" (UID: \"7ace7d0c-1b65-484b-9724-a03aded5ec7f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.568217 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ea6cf88e-bedf-4010-b737-0c93f2c4d4be-trusted-ca\") pod \"ingress-operator-5b745b69d9-4xtnb\" (UID: \"ea6cf88e-bedf-4010-b737-0c93f2c4d4be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4xtnb" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.568528 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/022b7871-8bf2-4432-9f33-d816fdd16fce-signing-cabundle\") pod \"service-ca-9c57cc56f-fkgqw\" (UID: \"022b7871-8bf2-4432-9f33-d816fdd16fce\") " pod="openshift-service-ca/service-ca-9c57cc56f-fkgqw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.571393 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/14128854-7eae-4729-90c4-10370fde7337-apiservice-cert\") pod \"packageserver-d55dfcdfc-fmh4j\" (UID: \"14128854-7eae-4729-90c4-10370fde7337\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.573787 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4d551409-85f7-4c8f-8144-64ac0bb1f155-secret-volume\") pod \"collect-profiles-29495520-k6gh6\" (UID: \"4d551409-85f7-4c8f-8144-64ac0bb1f155\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495520-k6gh6" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.574120 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/14128854-7eae-4729-90c4-10370fde7337-webhook-cert\") pod \"packageserver-d55dfcdfc-fmh4j\" (UID: \"14128854-7eae-4729-90c4-10370fde7337\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.592810 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vjhs9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.598752 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-d4zrj"] Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.600717 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-722r9\" (UniqueName: \"kubernetes.io/projected/9c216d7c-42b5-45fb-a68d-8e38d622978f-kube-api-access-722r9\") pod \"dns-default-f5g22\" (UID: \"9c216d7c-42b5-45fb-a68d-8e38d622978f\") " pod="openshift-dns/dns-default-f5g22" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.608915 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxjkf\" (UniqueName: \"kubernetes.io/projected/a345d711-84e3-47c8-a255-f833dfaca7fa-kube-api-access-wxjkf\") pod \"marketplace-operator-79b997595-mfxlj\" (UID: \"a345d711-84e3-47c8-a255-f833dfaca7fa\") " pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.609009 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8"] Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.612688 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9w8d8\" (UniqueName: \"kubernetes.io/projected/4b65245c-0e70-4a17-b739-9c08059b07dc-kube-api-access-9w8d8\") pod \"machine-config-controller-84d6567774-kbjfj\" (UID: \"4b65245c-0e70-4a17-b739-9c08059b07dc\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kbjfj" Jan 30 00:10:55 crc kubenswrapper[4885]: W0130 00:10:55.633633 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5af6d643_70f6_435e_b323_fac9aa37b466.slice/crio-3a6746339fdeff22212fc93bf77abaec20ff627e0c283804c97fbce655dd7268 WatchSource:0}: Error finding container 3a6746339fdeff22212fc93bf77abaec20ff627e0c283804c97fbce655dd7268: Status 404 returned error can't find the container with id 3a6746339fdeff22212fc93bf77abaec20ff627e0c283804c97fbce655dd7268 Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.635039 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxgrc\" (UniqueName: \"kubernetes.io/projected/361af061-ac94-45f6-af48-9e6f0a5a89e1-kube-api-access-kxgrc\") pod \"kube-storage-version-migrator-operator-b67b599dd-qw447\" (UID: \"361af061-ac94-45f6-af48-9e6f0a5a89e1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qw447" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.642615 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qw447" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.656544 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kbjfj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.658134 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d6df145a-2f02-4834-bb15-07a9b4e70784-node-bootstrap-token\") pod \"machine-config-server-92xfx\" (UID: \"d6df145a-2f02-4834-bb15-07a9b4e70784\") " pod="openshift-machine-config-operator/machine-config-server-92xfx" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.658251 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4b84\" (UniqueName: \"kubernetes.io/projected/94a6782c-15c8-43c6-a4f2-6c297ba52df0-kube-api-access-s4b84\") pod \"csi-hostpathplugin-tmtj2\" (UID: \"94a6782c-15c8-43c6-a4f2-6c297ba52df0\") " pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.658331 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/94a6782c-15c8-43c6-a4f2-6c297ba52df0-mountpoint-dir\") pod \"csi-hostpathplugin-tmtj2\" (UID: \"94a6782c-15c8-43c6-a4f2-6c297ba52df0\") " pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.658462 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/94a6782c-15c8-43c6-a4f2-6c297ba52df0-registration-dir\") pod \"csi-hostpathplugin-tmtj2\" (UID: \"94a6782c-15c8-43c6-a4f2-6c297ba52df0\") " pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.658616 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/91f35d98-710b-4439-9c13-91f00f6646c7-cert\") pod \"ingress-canary-6q2hj\" (UID: \"91f35d98-710b-4439-9c13-91f00f6646c7\") " pod="openshift-ingress-canary/ingress-canary-6q2hj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.661311 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/94a6782c-15c8-43c6-a4f2-6c297ba52df0-socket-dir\") pod \"csi-hostpathplugin-tmtj2\" (UID: \"94a6782c-15c8-43c6-a4f2-6c297ba52df0\") " pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.661435 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/94a6782c-15c8-43c6-a4f2-6c297ba52df0-plugins-dir\") pod \"csi-hostpathplugin-tmtj2\" (UID: \"94a6782c-15c8-43c6-a4f2-6c297ba52df0\") " pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.661509 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.661598 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptjps\" (UniqueName: 
\"kubernetes.io/projected/91f35d98-710b-4439-9c13-91f00f6646c7-kube-api-access-ptjps\") pod \"ingress-canary-6q2hj\" (UID: \"91f35d98-710b-4439-9c13-91f00f6646c7\") " pod="openshift-ingress-canary/ingress-canary-6q2hj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.661630 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdcb9\" (UniqueName: \"kubernetes.io/projected/d6df145a-2f02-4834-bb15-07a9b4e70784-kube-api-access-sdcb9\") pod \"machine-config-server-92xfx\" (UID: \"d6df145a-2f02-4834-bb15-07a9b4e70784\") " pod="openshift-machine-config-operator/machine-config-server-92xfx" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.661699 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/94a6782c-15c8-43c6-a4f2-6c297ba52df0-csi-data-dir\") pod \"csi-hostpathplugin-tmtj2\" (UID: \"94a6782c-15c8-43c6-a4f2-6c297ba52df0\") " pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.661742 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/d6df145a-2f02-4834-bb15-07a9b4e70784-certs\") pod \"machine-config-server-92xfx\" (UID: \"d6df145a-2f02-4834-bb15-07a9b4e70784\") " pod="openshift-machine-config-operator/machine-config-server-92xfx" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.662833 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/94a6782c-15c8-43c6-a4f2-6c297ba52df0-plugins-dir\") pod \"csi-hostpathplugin-tmtj2\" (UID: \"94a6782c-15c8-43c6-a4f2-6c297ba52df0\") " pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.662932 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/94a6782c-15c8-43c6-a4f2-6c297ba52df0-registration-dir\") pod \"csi-hostpathplugin-tmtj2\" (UID: \"94a6782c-15c8-43c6-a4f2-6c297ba52df0\") " pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.662974 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/94a6782c-15c8-43c6-a4f2-6c297ba52df0-mountpoint-dir\") pod \"csi-hostpathplugin-tmtj2\" (UID: \"94a6782c-15c8-43c6-a4f2-6c297ba52df0\") " pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" Jan 30 00:10:55 crc kubenswrapper[4885]: E0130 00:10:55.663192 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:10:56.163170219 +0000 UTC m=+142.754642157 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.663351 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/94a6782c-15c8-43c6-a4f2-6c297ba52df0-csi-data-dir\") pod \"csi-hostpathplugin-tmtj2\" (UID: \"94a6782c-15c8-43c6-a4f2-6c297ba52df0\") " pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.663487 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/94a6782c-15c8-43c6-a4f2-6c297ba52df0-socket-dir\") pod \"csi-hostpathplugin-tmtj2\" (UID: \"94a6782c-15c8-43c6-a4f2-6c297ba52df0\") " pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.664681 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72tlm\" (UniqueName: \"kubernetes.io/projected/b8533351-ed92-4278-89ed-8e3f31aecb20-kube-api-access-72tlm\") pod \"dns-operator-744455d44c-8vr8f\" (UID: \"b8533351-ed92-4278-89ed-8e3f31aecb20\") " pod="openshift-dns-operator/dns-operator-744455d44c-8vr8f" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.687913 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/91f35d98-710b-4439-9c13-91f00f6646c7-cert\") pod \"ingress-canary-6q2hj\" (UID: \"91f35d98-710b-4439-9c13-91f00f6646c7\") " pod="openshift-ingress-canary/ingress-canary-6q2hj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.693075 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/d6df145a-2f02-4834-bb15-07a9b4e70784-certs\") pod \"machine-config-server-92xfx\" (UID: \"d6df145a-2f02-4834-bb15-07a9b4e70784\") " pod="openshift-machine-config-operator/machine-config-server-92xfx" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.693353 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-f5g22" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.695507 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d6df145a-2f02-4834-bb15-07a9b4e70784-node-bootstrap-token\") pod \"machine-config-server-92xfx\" (UID: \"d6df145a-2f02-4834-bb15-07a9b4e70784\") " pod="openshift-machine-config-operator/machine-config-server-92xfx" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.696476 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvcxh\" (UniqueName: \"kubernetes.io/projected/c58668af-88e5-4058-9571-5ce0f3fd7e9f-kube-api-access-jvcxh\") pod \"console-f9d7485db-2bgb9\" (UID: \"c58668af-88e5-4058-9571-5ce0f3fd7e9f\") " pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.696890 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bv77v\" (UniqueName: \"kubernetes.io/projected/1e053140-7618-4580-8899-a121dd6759f8-kube-api-access-bv77v\") pod \"openshift-apiserver-operator-796bbdcf4f-nm4dq\" (UID: \"1e053140-7618-4580-8899-a121dd6759f8\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-nm4dq" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.720394 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scjzj\" (UniqueName: \"kubernetes.io/projected/5f617943-c63f-4006-907c-dc2584eac526-kube-api-access-scjzj\") pod \"service-ca-operator-777779d784-67ddr\" (UID: \"5f617943-c63f-4006-907c-dc2584eac526\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-67ddr" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.731183 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwnvf\" (UniqueName: \"kubernetes.io/projected/7ace7d0c-1b65-484b-9724-a03aded5ec7f-kube-api-access-bwnvf\") pod \"etcd-operator-b45778765-4bhjw\" (UID: \"7ace7d0c-1b65-484b-9724-a03aded5ec7f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.762064 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6h4t\" (UniqueName: \"kubernetes.io/projected/14128854-7eae-4729-90c4-10370fde7337-kube-api-access-l6h4t\") pod \"packageserver-d55dfcdfc-fmh4j\" (UID: \"14128854-7eae-4729-90c4-10370fde7337\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.762895 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:55 crc kubenswrapper[4885]: E0130 00:10:55.763146 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:10:56.263113786 +0000 UTC m=+142.854585534 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.763371 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: E0130 00:10:55.763868 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:10:56.263860596 +0000 UTC m=+142.855332344 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.792393 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhk4q\" (UniqueName: \"kubernetes.io/projected/f40ec490-af02-4935-bb20-3698b71fce88-kube-api-access-fhk4q\") pod \"olm-operator-6b444d44fb-8r5gv\" (UID: \"f40ec490-af02-4935-bb20-3698b71fce88\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8r5gv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.802551 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-nm4dq" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.809089 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6e019250-5bdc-4a0e-a1e9-ecaa947faa4e-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-ngf2g\" (UID: \"6e019250-5bdc-4a0e-a1e9-ecaa947faa4e\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ngf2g" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.809743 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.823217 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-8vr8f" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.832287 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stlkn\" (UniqueName: \"kubernetes.io/projected/a7093ca4-c09c-4031-ba6f-e7fc85890480-kube-api-access-stlkn\") pod \"router-default-5444994796-fvdhv\" (UID: \"a7093ca4-c09c-4031-ba6f-e7fc85890480\") " pod="openshift-ingress/router-default-5444994796-fvdhv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.837673 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.858708 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhtdh\" (UniqueName: \"kubernetes.io/projected/fe084dfc-7335-4d94-8b7c-09637c52b19f-kube-api-access-xhtdh\") pod \"migrator-59844c95c7-9dggz\" (UID: \"fe084dfc-7335-4d94-8b7c-09637c52b19f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9dggz" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.864990 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qg4qt\" (UniqueName: \"kubernetes.io/projected/ea6cf88e-bedf-4010-b737-0c93f2c4d4be-kube-api-access-qg4qt\") pod \"ingress-operator-5b745b69d9-4xtnb\" (UID: \"ea6cf88e-bedf-4010-b737-0c93f2c4d4be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4xtnb" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.868440 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:55 crc kubenswrapper[4885]: E0130 00:10:55.869913 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:10:56.369882266 +0000 UTC m=+142.961354014 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.871460 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.871755 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-p87kw"] Jan 30 00:10:55 crc kubenswrapper[4885]: E0130 00:10:55.872270 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:10:56.37225266 +0000 UTC m=+142.963724398 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.875047 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.886962 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-r9hww"] Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.908627 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8r5gv" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.915856 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9dggz" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.922687 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ngf2g" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.929797 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-qtxcm"] Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.936448 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-67ddr" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.945856 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b5gjz"] Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.952378 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d3f0f887-4427-41fa-a495-470f6a1da8ae-bound-sa-token\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.960360 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29495520-c9vgk" event={"ID":"c59baa8a-ba27-4ef6-9d63-a0a25b597f7e","Type":"ContainerStarted","Data":"90e2837a2e429ec1f476b964cf9aaf2fc5515b45e35fd03769fd5f61a650cb79"} Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.960426 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29495520-c9vgk" event={"ID":"c59baa8a-ba27-4ef6-9d63-a0a25b597f7e","Type":"ContainerStarted","Data":"43b4a4ac1de95958d0cbf74cc941926ee82f3928958882a5d089446522eb360a"} Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.960627 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s55g7\" (UniqueName: \"kubernetes.io/projected/0203f99c-4bc4-4ebd-b17b-7f1f54f54315-kube-api-access-s55g7\") pod \"catalog-operator-68c6474976-clmbp\" (UID: \"0203f99c-4bc4-4ebd-b17b-7f1f54f54315\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-clmbp" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.962091 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvhbh\" (UniqueName: \"kubernetes.io/projected/d3f0f887-4427-41fa-a495-470f6a1da8ae-kube-api-access-gvhbh\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.962800 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cd22w" event={"ID":"93771e95-54e2-4f25-86b1-c1f6b0f18a8f","Type":"ContainerStarted","Data":"a9ff00a790325cd1df4675c373a4ec298f8c8b855cecc22beee82710280b7d23"} Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.962849 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cd22w" event={"ID":"93771e95-54e2-4f25-86b1-c1f6b0f18a8f","Type":"ContainerStarted","Data":"307b7635b0aca3b2a7adb9466395fb67d193d415abc79c7139aee18b2450ef1a"} Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.963501 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fbbj\" (UniqueName: \"kubernetes.io/projected/4d551409-85f7-4c8f-8144-64ac0bb1f155-kube-api-access-4fbbj\") pod \"collect-profiles-29495520-k6gh6\" (UID: \"4d551409-85f7-4c8f-8144-64ac0bb1f155\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495520-k6gh6" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.964397 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.968507 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-7hvjx" event={"ID":"bb88aa5c-bc88-4447-b233-ad65df878fcd","Type":"ContainerStarted","Data":"31b5d52c77d2b52588600c3b96d83a1d04b9e7b71c4a1159500656ed5c366c1b"} Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.968555 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-7hvjx" event={"ID":"bb88aa5c-bc88-4447-b233-ad65df878fcd","Type":"ContainerStarted","Data":"0e32d503fadaa50d28ee75d25ba9c27f4281d628bdd63335d0b4ec5496b1aead"} Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.971147 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29495520-k6gh6" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.973669 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjgwg\" (UniqueName: \"kubernetes.io/projected/7fdeda66-c9ae-4b65-8f49-c3f46a903e52-kube-api-access-rjgwg\") pod \"console-operator-58897d9998-jjb74\" (UID: \"7fdeda66-c9ae-4b65-8f49-c3f46a903e52\") " pod="openshift-console-operator/console-operator-58897d9998-jjb74" Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.975095 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:55 crc kubenswrapper[4885]: E0130 00:10:55.975673 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:10:56.47563611 +0000 UTC m=+143.067107878 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.978089 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24xxs" event={"ID":"19609ed6-3922-4b41-b02d-abf4fd2922a6","Type":"ContainerStarted","Data":"06140b10d22d29f54d8ffe7be81fe368e6997a01509095f5e8c215732b6ac672"} Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.978163 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24xxs" event={"ID":"19609ed6-3922-4b41-b02d-abf4fd2922a6","Type":"ContainerStarted","Data":"c97973cb36efa08b99c146d0d2f0493da3b2644df744f63159bd76b01cffd8c6"} Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.992128 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-d4zrj" event={"ID":"51bce3b6-6a4d-45ea-89a7-bf5cf50d7610","Type":"ContainerStarted","Data":"17766f638d18e73fa07185e52acc7bfc941897db67059f602e2338d6aa2d7bc6"} Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.995462 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" event={"ID":"929bbe65-1902-453c-bebf-4e833b325ab1","Type":"ContainerStarted","Data":"e4d4537ab91da298d4352b122eb8b678311dc7e51faff49a6ae56089b5f27e84"} Jan 30 00:10:55 crc kubenswrapper[4885]: I0130 00:10:55.997791 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqjz7\" (UniqueName: \"kubernetes.io/projected/022b7871-8bf2-4432-9f33-d816fdd16fce-kube-api-access-bqjz7\") pod \"service-ca-9c57cc56f-fkgqw\" (UID: \"022b7871-8bf2-4432-9f33-d816fdd16fce\") " pod="openshift-service-ca/service-ca-9c57cc56f-fkgqw" Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.000331 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-clmbp" Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.015443 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8skch" event={"ID":"5af6d643-70f6-435e-b323-fac9aa37b466","Type":"ContainerStarted","Data":"3a6746339fdeff22212fc93bf77abaec20ff627e0c283804c97fbce655dd7268"} Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.019947 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" event={"ID":"5bd56c34-d51d-4f93-975b-d5c96f11b7f5","Type":"ContainerStarted","Data":"63cfdbcec84f8f6d931a34e0ddf652a9e78e266a729b1603d83def89c53527d3"} Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.023465 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4b84\" (UniqueName: \"kubernetes.io/projected/94a6782c-15c8-43c6-a4f2-6c297ba52df0-kube-api-access-s4b84\") pod \"csi-hostpathplugin-tmtj2\" (UID: \"94a6782c-15c8-43c6-a4f2-6c297ba52df0\") " pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.032923 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.040934 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptjps\" (UniqueName: \"kubernetes.io/projected/91f35d98-710b-4439-9c13-91f00f6646c7-kube-api-access-ptjps\") pod \"ingress-canary-6q2hj\" (UID: \"91f35d98-710b-4439-9c13-91f00f6646c7\") " pod="openshift-ingress-canary/ingress-canary-6q2hj" Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.042815 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" event={"ID":"fcde4e44-9ff6-4539-84f3-a016080e13ce","Type":"ContainerStarted","Data":"1ec702bcec1481303c4c3602b0ad803ba5b5840813a1289f97f19dd80fe9a7ea"} Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.045140 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-6q2hj" Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.076558 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:56 crc kubenswrapper[4885]: E0130 00:10:56.077584 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:10:56.57756799 +0000 UTC m=+143.169039738 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.078711 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdcb9\" (UniqueName: \"kubernetes.io/projected/d6df145a-2f02-4834-bb15-07a9b4e70784-kube-api-access-sdcb9\") pod \"machine-config-server-92xfx\" (UID: \"d6df145a-2f02-4834-bb15-07a9b4e70784\") " pod="openshift-machine-config-operator/machine-config-server-92xfx" Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.135842 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-fvdhv" Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.146051 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4xtnb" Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.166538 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-jjb74" Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.178837 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:56 crc kubenswrapper[4885]: E0130 00:10:56.179340 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:10:56.679317226 +0000 UTC m=+143.270788974 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.211412 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6fm5q"] Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.211452 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8knvk"] Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.211464 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm"] Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.250478 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-fkgqw" Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.282640 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:56 crc kubenswrapper[4885]: E0130 00:10:56.283159 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:10:56.783145397 +0000 UTC m=+143.374617145 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.325552 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-92xfx" Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.357320 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-m229k"] Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.374470 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9wwjr"] Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.384343 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:56 crc kubenswrapper[4885]: E0130 00:10:56.385058 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:10:56.885037687 +0000 UTC m=+143.476509435 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.398980 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-hcx7w"] Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.488744 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:56 crc kubenswrapper[4885]: E0130 00:10:56.489339 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:10:56.989323291 +0000 UTC m=+143.580795029 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.589899 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:56 crc kubenswrapper[4885]: E0130 00:10:56.590680 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:10:57.090662265 +0000 UTC m=+143.682134013 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.692048 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:56 crc kubenswrapper[4885]: E0130 00:10:56.692477 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:10:57.192459752 +0000 UTC m=+143.783931500 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.769495 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-kbjfj"] Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.773097 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vjhs9"] Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.781692 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-f5g22"] Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.795549 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:56 crc kubenswrapper[4885]: E0130 00:10:56.796274 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:10:57.296250912 +0000 UTC m=+143.887722650 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.833443 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qw447"] Jan 30 00:10:56 crc kubenswrapper[4885]: I0130 00:10:56.900227 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:56 crc kubenswrapper[4885]: E0130 00:10:56.900682 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:10:57.40066505 +0000 UTC m=+143.992136798 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.000821 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:57 crc kubenswrapper[4885]: E0130 00:10:57.001291 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:10:57.501272675 +0000 UTC m=+144.092744433 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:57 crc kubenswrapper[4885]: W0130 00:10:57.060258 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4b65245c_0e70_4a17_b739_9c08059b07dc.slice/crio-183fd14a34abe4bd1bcbc5dcf5fd3b58b877dace91c6f934ae2d17a93c9738f7 WatchSource:0}: Error finding container 183fd14a34abe4bd1bcbc5dcf5fd3b58b877dace91c6f934ae2d17a93c9738f7: Status 404 returned error can't find the container with id 183fd14a34abe4bd1bcbc5dcf5fd3b58b877dace91c6f934ae2d17a93c9738f7 Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.090203 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b5gjz" event={"ID":"4a58c498-e399-40e4-a271-d42efd6c6745","Type":"ContainerStarted","Data":"f2d1ca25ead140fa571a7c802523b5727e82977ae6c68342362a44c0746003f1"} Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.098892 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-92xfx" event={"ID":"d6df145a-2f02-4834-bb15-07a9b4e70784","Type":"ContainerStarted","Data":"6ce78eb927255401aa0b0d67299cfc6a7f856c56aae36f3e6c0430756bb1caf3"} Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.102376 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:57 crc kubenswrapper[4885]: E0130 00:10:57.102677 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:10:57.602665741 +0000 UTC m=+144.194137489 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.104694 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8skch" event={"ID":"5af6d643-70f6-435e-b323-fac9aa37b466","Type":"ContainerStarted","Data":"b3e5364c2ef471aba52a609ce49379aa95f067f8d7613d81b76210e4c6fdc16d"} Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.105523 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-fvdhv" event={"ID":"a7093ca4-c09c-4031-ba6f-e7fc85890480","Type":"ContainerStarted","Data":"d34c63b5c4e07e6bdb6137375de021fa06b73b87154e3244110f0b83ae69a6f3"} Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.106203 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" event={"ID":"8f8ad951-1f10-4883-8132-7afa6c3df767","Type":"ContainerStarted","Data":"48612955c7f778d60a79c509ddeee36cc7eb4e01cc9d30edec656b420b6316f2"} Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.107046 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-hcx7w" event={"ID":"d806b98c-1d4e-42b2-9da3-4afc4ca6e255","Type":"ContainerStarted","Data":"8e73790122e2bd05b05e0311b1f47c64f09a2453bac21770a0992283012381ec"} Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.108139 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9wwjr" event={"ID":"12453952-578c-4ef3-97bd-eee389ec3d91","Type":"ContainerStarted","Data":"4bdab87575bfbd3f66b1908f9b8d7d395ba6a2d805ce12cd9bf2c8e5553bf770"} Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.112974 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m229k" event={"ID":"28808760-39b2-4b17-82b4-d3b6783ed31c","Type":"ContainerStarted","Data":"624ef4a247a26891de92e60cbe7bf575a8dd99f1215b35c0bfb0d8dbe24ae0da"} Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.114202 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vjhs9" event={"ID":"9d8eb84b-e3c3-46ab-b9bb-29f63d57284e","Type":"ContainerStarted","Data":"d3be046856e358deee92ba482410dd2890d9fbecfb8a239e215bccb0017c75c0"} Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.114988 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8knvk" event={"ID":"b60a4e04-0955-4b9a-9165-92ee6a82b1a6","Type":"ContainerStarted","Data":"228aa16a5de06a4f951175d101da0b35ffefa3d8552468b14c79955edbf51b0e"} Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.115679 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-qtxcm" event={"ID":"0eb0e632-fc50-4845-aa1b-4aab2bb7826b","Type":"ContainerStarted","Data":"a8ddd0a87a73848337b5b1686c8e78f86b22d40b705f5e39e62974737327ae0c"} Jan 30 
00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.119175 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-r9hww" event={"ID":"063f4cfd-666f-4493-8678-df74e0347ba3","Type":"ContainerStarted","Data":"79e70e1509d7749d98321db6161611dcf1f94095805315d3c91d2e6153712122"} Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.121228 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" event={"ID":"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03","Type":"ContainerStarted","Data":"005bf459db7735f819ce1abad74c9a580479375a5b887120e816aa6cb8d4fc18"} Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.123060 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-f5g22" event={"ID":"9c216d7c-42b5-45fb-a68d-8e38d622978f","Type":"ContainerStarted","Data":"0159ef1a1795a36662b8c293f803b5f349b08b35795007e88c41ded792f101bc"} Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.132528 4885 generic.go:334] "Generic (PLEG): container finished" podID="fcde4e44-9ff6-4539-84f3-a016080e13ce" containerID="c126a9608996fbd1a535679ff9f1a92f12ad0ac616f541fbdd9a0b94b6da6248" exitCode=0 Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.132619 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" event={"ID":"fcde4e44-9ff6-4539-84f3-a016080e13ce","Type":"ContainerDied","Data":"c126a9608996fbd1a535679ff9f1a92f12ad0ac616f541fbdd9a0b94b6da6248"} Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.138401 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6fm5q" event={"ID":"a6dfcb67-43fe-46d9-9349-c581afa2d82f","Type":"ContainerStarted","Data":"09c75f8d53b67cd155d2bd2833c2bf02c65c450264a2ceeb5f3bd742afbb4c77"} Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.206914 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:57 crc kubenswrapper[4885]: E0130 00:10:57.208387 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:10:57.708348423 +0000 UTC m=+144.299820231 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.322874 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:57 crc kubenswrapper[4885]: E0130 00:10:57.323484 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:10:57.823461696 +0000 UTC m=+144.414933444 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.424362 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:57 crc kubenswrapper[4885]: E0130 00:10:57.425352 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:10:57.925334155 +0000 UTC m=+144.516805903 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.531278 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:57 crc kubenswrapper[4885]: E0130 00:10:57.532818 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:10:58.032791433 +0000 UTC m=+144.624263181 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.638923 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:57 crc kubenswrapper[4885]: E0130 00:10:57.639395 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:10:58.139372929 +0000 UTC m=+144.730844677 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.718359 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-nm4dq"] Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.722213 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8r5gv"] Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.725454 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-9dggz"] Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.733793 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-pruner-29495520-c9vgk" podStartSLOduration=118.733745527 podStartE2EDuration="1m58.733745527s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:57.727591862 +0000 UTC m=+144.319063630" watchObservedRunningTime="2026-01-30 00:10:57.733745527 +0000 UTC m=+144.325217275" Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.741646 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:57 crc kubenswrapper[4885]: E0130 00:10:57.742158 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:10:58.242141962 +0000 UTC m=+144.833613720 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.744483 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mfxlj"] Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.759669 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ngf2g"] Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.763408 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-2bgb9"] Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.779728 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-7hvjx" podStartSLOduration=118.779700628 podStartE2EDuration="1m58.779700628s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:57.767955833 +0000 UTC m=+144.359427581" watchObservedRunningTime="2026-01-30 00:10:57.779700628 +0000 UTC m=+144.371172376" Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.807762 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8vr8f"] Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.824501 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-4bhjw"] Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.826704 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j"] Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.846159 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:57 crc kubenswrapper[4885]: E0130 00:10:57.848109 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:10:58.34805986 +0000 UTC m=+144.939531608 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.857529 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-clmbp"] Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.878326 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-67ddr"] Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.910473 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29495520-k6gh6"] Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.928346 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-jjb74"] Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.943606 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-6q2hj"] Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.947893 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:57 crc kubenswrapper[4885]: E0130 00:10:57.948340 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:10:58.448321856 +0000 UTC m=+145.039793604 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.952159 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-4xtnb"] Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.955585 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-fkgqw"] Jan 30 00:10:57 crc kubenswrapper[4885]: I0130 00:10:57.959945 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-tmtj2"] Jan 30 00:10:57 crc kubenswrapper[4885]: W0130 00:10:57.978408 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podea6cf88e_bedf_4010_b737_0c93f2c4d4be.slice/crio-1f2013116ff61a071c965a18032f46b3bf74b05cba69e56c478b403f0fcf817a WatchSource:0}: Error finding container 1f2013116ff61a071c965a18032f46b3bf74b05cba69e56c478b403f0fcf817a: Status 404 returned error can't find the container with id 1f2013116ff61a071c965a18032f46b3bf74b05cba69e56c478b403f0fcf817a Jan 30 00:10:57 crc kubenswrapper[4885]: W0130 00:10:57.979900 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7fdeda66_c9ae_4b65_8f49_c3f46a903e52.slice/crio-5efc807af7ad9888d031ea95bfcd3a87ba93fe28e6bd81361021c249cb2cf5bb WatchSource:0}: Error finding container 5efc807af7ad9888d031ea95bfcd3a87ba93fe28e6bd81361021c249cb2cf5bb: Status 404 returned error can't find the container with id 5efc807af7ad9888d031ea95bfcd3a87ba93fe28e6bd81361021c249cb2cf5bb Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.049446 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:58 crc kubenswrapper[4885]: E0130 00:10:58.052735 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:10:58.552682271 +0000 UTC m=+145.144154019 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.152463 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vjhs9" event={"ID":"9d8eb84b-e3c3-46ab-b9bb-29f63d57284e","Type":"ContainerStarted","Data":"ef4c9d24c576591d52ceb055f5cb96b184a0f17f9bb6f3bacea8512c25bd8592"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.153430 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-qtxcm" event={"ID":"0eb0e632-fc50-4845-aa1b-4aab2bb7826b","Type":"ContainerStarted","Data":"a1c6e760a7ec3b83dd530a31fafe50c34b3f40b503dddf8c593801e478f1ff56"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.153567 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:58 crc kubenswrapper[4885]: E0130 00:10:58.154044 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:10:58.654031026 +0000 UTC m=+145.245502764 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.154610 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-qtxcm" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.158348 4885 patch_prober.go:28] interesting pod/downloads-7954f5f757-qtxcm container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.158403 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-qtxcm" podUID="0eb0e632-fc50-4845-aa1b-4aab2bb7826b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.172832 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-92xfx" event={"ID":"d6df145a-2f02-4834-bb15-07a9b4e70784","Type":"ContainerStarted","Data":"02f42a74a4ed9f0908e6cc0a044b7c50ded9d4994179859672b98e29f0c39230"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.174367 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-qtxcm" podStartSLOduration=119.17434466 podStartE2EDuration="1m59.17434466s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:58.174319479 +0000 UTC m=+144.765791237" watchObservedRunningTime="2026-01-30 00:10:58.17434466 +0000 UTC m=+144.765816408" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.178385 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" event={"ID":"a345d711-84e3-47c8-a255-f833dfaca7fa","Type":"ContainerStarted","Data":"13d16105c3ffd7b319e1258860a37fd66f32b7ac34b8684aa782a79b1dd489c8"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.185702 4885 generic.go:334] "Generic (PLEG): container finished" podID="5af6d643-70f6-435e-b323-fac9aa37b466" containerID="b3e5364c2ef471aba52a609ce49379aa95f067f8d7613d81b76210e4c6fdc16d" exitCode=0 Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.185975 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8skch" event={"ID":"5af6d643-70f6-435e-b323-fac9aa37b466","Type":"ContainerDied","Data":"b3e5364c2ef471aba52a609ce49379aa95f067f8d7613d81b76210e4c6fdc16d"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.187723 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" event={"ID":"7ace7d0c-1b65-484b-9724-a03aded5ec7f","Type":"ContainerStarted","Data":"91f19ab0de8551b7bdfc9f6bbe574a354b4dd9a779d60c6031441ecfbabd8aa1"} Jan 30 00:10:58 crc 
kubenswrapper[4885]: I0130 00:10:58.190450 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cd22w" event={"ID":"93771e95-54e2-4f25-86b1-c1f6b0f18a8f","Type":"ContainerStarted","Data":"e5b6009a7b68c751993fe9dc024acb909d70b4ac8eae3dfc9337d455fbafc1d2"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.191859 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-fkgqw" event={"ID":"022b7871-8bf2-4432-9f33-d816fdd16fce","Type":"ContainerStarted","Data":"4a552012e7d249f0245b4b2d70e46e6d1b0265115c7c0e80d6039e50add5ce63"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.193910 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kbjfj" event={"ID":"4b65245c-0e70-4a17-b739-9c08059b07dc","Type":"ContainerStarted","Data":"ed6ffad5916da46b3d4a12c62187847b5317135f5165edaeb91b3f1db3943b6c"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.193939 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kbjfj" event={"ID":"4b65245c-0e70-4a17-b739-9c08059b07dc","Type":"ContainerStarted","Data":"183fd14a34abe4bd1bcbc5dcf5fd3b58b877dace91c6f934ae2d17a93c9738f7"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.200218 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-d4zrj" event={"ID":"51bce3b6-6a4d-45ea-89a7-bf5cf50d7610","Type":"ContainerStarted","Data":"76f2f560a2cadd1862baabeb7262848e82dec2c24a77f28c4ce3bbcac2fc5696"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.200251 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-d4zrj" event={"ID":"51bce3b6-6a4d-45ea-89a7-bf5cf50d7610","Type":"ContainerStarted","Data":"a691576730354ea0d79ba404f116e7384d73c0ded53ac06e84f1044d1282e2f1"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.203499 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4xtnb" event={"ID":"ea6cf88e-bedf-4010-b737-0c93f2c4d4be","Type":"ContainerStarted","Data":"1f2013116ff61a071c965a18032f46b3bf74b05cba69e56c478b403f0fcf817a"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.207994 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" event={"ID":"fcde4e44-9ff6-4539-84f3-a016080e13ce","Type":"ContainerStarted","Data":"d5b1b7b2e5934aa3a4ac92fba6c464e37bc75be9134c213a4b6f695f824644fa"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.211176 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6fm5q" event={"ID":"a6dfcb67-43fe-46d9-9349-c581afa2d82f","Type":"ContainerStarted","Data":"a17a1b4ba4d7a0e5337d42780f728e50d790e2539c05d2e3f96dba0964485f6d"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.221850 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qw447" event={"ID":"361af061-ac94-45f6-af48-9e6f0a5a89e1","Type":"ContainerStarted","Data":"13b786adc48177895778baafb70d5afe6b4e961d9af688d9134ddbf88cf89c9c"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.221905 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qw447" event={"ID":"361af061-ac94-45f6-af48-9e6f0a5a89e1","Type":"ContainerStarted","Data":"fc1058c2fc019449543acf19c2a9defaaf2fd19c426311fe90c121232b53b517"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.224386 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-92xfx" podStartSLOduration=6.22435312 podStartE2EDuration="6.22435312s" podCreationTimestamp="2026-01-30 00:10:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:58.198461916 +0000 UTC m=+144.789933664" watchObservedRunningTime="2026-01-30 00:10:58.22435312 +0000 UTC m=+144.815824858" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.228402 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-r9hww" event={"ID":"063f4cfd-666f-4493-8678-df74e0347ba3","Type":"ContainerStarted","Data":"cdde72036a655664e5491693b9ff78413ef41c284da24f98952be55e6c77b1c8"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.240894 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-6q2hj" event={"ID":"91f35d98-710b-4439-9c13-91f00f6646c7","Type":"ContainerStarted","Data":"52d3f79532dfa94ad8e6aaa85e56fd84d5a43e61d55a3633f976dda42200803a"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.244908 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cd22w" podStartSLOduration=119.2448794 podStartE2EDuration="1m59.2448794s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:58.242803454 +0000 UTC m=+144.834275212" watchObservedRunningTime="2026-01-30 00:10:58.2448794 +0000 UTC m=+144.836351148" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.257393 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:58 crc kubenswrapper[4885]: E0130 00:10:58.258930 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:10:58.758902326 +0000 UTC m=+145.350374074 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.271580 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6fm5q" podStartSLOduration=119.271558454 podStartE2EDuration="1m59.271558454s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:58.268814231 +0000 UTC m=+144.860285979" watchObservedRunningTime="2026-01-30 00:10:58.271558454 +0000 UTC m=+144.863030202" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.288337 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9dggz" event={"ID":"fe084dfc-7335-4d94-8b7c-09637c52b19f","Type":"ContainerStarted","Data":"9e472ccca921703ae50a1b37624f4654ee43c89010a30a8bca581ca356baa089"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.291088 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b5gjz" event={"ID":"4a58c498-e399-40e4-a271-d42efd6c6745","Type":"ContainerStarted","Data":"8c6425469e898b051e4d9d4e9e5b4874c187905a92c8cfee0711ed1426ed4dcc"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.308741 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" event={"ID":"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03","Type":"ContainerStarted","Data":"dc0b7da3f0654c20b01873156a07faeb2f7769d8b272518607783f16cc33a181"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.309505 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.312837 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-r9hww" podStartSLOduration=119.312796499 podStartE2EDuration="1m59.312796499s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:58.305177615 +0000 UTC m=+144.896649363" watchObservedRunningTime="2026-01-30 00:10:58.312796499 +0000 UTC m=+144.904268247" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.320960 4885 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-p87kw container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.20:6443/healthz\": dial tcp 10.217.0.20:6443: connect: connection refused" start-of-body= Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.321047 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" podUID="dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" containerName="oauth-openshift" probeResult="failure" output="Get 
\"https://10.217.0.20:6443/healthz\": dial tcp 10.217.0.20:6443: connect: connection refused" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.323854 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" event={"ID":"929bbe65-1902-453c-bebf-4e833b325ab1","Type":"ContainerStarted","Data":"c8a5344263a429acaee41f9c27d92523a71645e1606d4d13fc5b599bf391989b"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.323932 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.326749 4885 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-mhgj8 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.326821 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" podUID="929bbe65-1902-453c-bebf-4e833b325ab1" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.343974 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qw447" podStartSLOduration=119.343946904 podStartE2EDuration="1m59.343946904s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:58.332098067 +0000 UTC m=+144.923569815" watchObservedRunningTime="2026-01-30 00:10:58.343946904 +0000 UTC m=+144.935418652" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.360359 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:58 crc kubenswrapper[4885]: E0130 00:10:58.361475 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:10:58.861458233 +0000 UTC m=+145.452929981 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.363917 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-f5g22" event={"ID":"9c216d7c-42b5-45fb-a68d-8e38d622978f","Type":"ContainerStarted","Data":"a41f5e8fbfc841081daad2a990e6b636e1c9ba64a6fa2169ab7011ca125ac443"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.407376 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" podStartSLOduration=119.407345422 podStartE2EDuration="1m59.407345422s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:58.393563743 +0000 UTC m=+144.985035491" watchObservedRunningTime="2026-01-30 00:10:58.407345422 +0000 UTC m=+144.998817170" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.427569 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" podStartSLOduration=119.427541083 podStartE2EDuration="1m59.427541083s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:58.42219563 +0000 UTC m=+145.013667378" watchObservedRunningTime="2026-01-30 00:10:58.427541083 +0000 UTC m=+145.019012831" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.442320 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8vr8f" event={"ID":"b8533351-ed92-4278-89ed-8e3f31aecb20","Type":"ContainerStarted","Data":"4f00b1d84faec7c5472db916a32e4dade18a1bcf3c22b5295a084a2bb2189881"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.456419 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-jjb74" event={"ID":"7fdeda66-c9ae-4b65-8f49-c3f46a903e52","Type":"ContainerStarted","Data":"5efc807af7ad9888d031ea95bfcd3a87ba93fe28e6bd81361021c249cb2cf5bb"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.456641 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-nm4dq" event={"ID":"1e053140-7618-4580-8899-a121dd6759f8","Type":"ContainerStarted","Data":"3e1713a4ffbb3e0b2209fbd0bb7d14105e15b1b6f36e0e8870e02938c87d8f68"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.460736 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2bgb9" event={"ID":"c58668af-88e5-4058-9571-5ce0f3fd7e9f","Type":"ContainerStarted","Data":"d721ac51b96f13ab8d3bb7966174cf7683b5c28abfcee69972397311ca6af29b"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.468017 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:58 crc kubenswrapper[4885]: E0130 00:10:58.486822 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:10:58.98676058 +0000 UTC m=+145.578232328 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.488407 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-b5gjz" podStartSLOduration=119.488379572 podStartE2EDuration="1m59.488379572s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:58.473546495 +0000 UTC m=+145.065018243" watchObservedRunningTime="2026-01-30 00:10:58.488379572 +0000 UTC m=+145.079851310" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.547664 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8knvk" event={"ID":"b60a4e04-0955-4b9a-9165-92ee6a82b1a6","Type":"ContainerStarted","Data":"786d7077cc528ab8eb34b7a99c2f72def72c2f33875c06e631fb70c6d6366eca"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.574138 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" event={"ID":"5bd56c34-d51d-4f93-975b-d5c96f11b7f5","Type":"ContainerStarted","Data":"19292bb7922049d4245dba96da233b16ba1b4e3160e29e5ba750c586d41e4d4b"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.576001 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.580358 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-clmbp" event={"ID":"0203f99c-4bc4-4ebd-b17b-7f1f54f54315","Type":"ContainerStarted","Data":"6e4238ee7d6021fd140e3849a87e36df2d0f0b93690b53803b551ec418122632"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.588049 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:58 crc kubenswrapper[4885]: E0130 00:10:58.588537 4885 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:10:59.088521125 +0000 UTC m=+145.679992873 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.595760 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-8knvk" podStartSLOduration=119.595731989 podStartE2EDuration="1m59.595731989s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:58.586652186 +0000 UTC m=+145.178123934" watchObservedRunningTime="2026-01-30 00:10:58.595731989 +0000 UTC m=+145.187203737" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.600755 4885 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-2f8ww container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.602762 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" podUID="5bd56c34-d51d-4f93-975b-d5c96f11b7f5" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.626895 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" podStartSLOduration=119.626862343 podStartE2EDuration="1m59.626862343s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:58.61892204 +0000 UTC m=+145.210393808" watchObservedRunningTime="2026-01-30 00:10:58.626862343 +0000 UTC m=+145.218334091" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.654370 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-fvdhv" event={"ID":"a7093ca4-c09c-4031-ba6f-e7fc85890480","Type":"ContainerStarted","Data":"0a531cd3613861f719e0bef94cde4833fb2f10c4e10d1c8c1d7c2be981087226"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.669973 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" event={"ID":"94a6782c-15c8-43c6-a4f2-6c297ba52df0","Type":"ContainerStarted","Data":"d7da4ceeb87caf3aafaf0eedc3f7f1bbb12e4925aaf3ee49816503e3379bf351"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.702345 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24xxs" 
event={"ID":"19609ed6-3922-4b41-b02d-abf4fd2922a6","Type":"ContainerStarted","Data":"cfd98e36b57a59d4d275f4e2afb965caf0458543dbbce6c5fb7269f27269ec93"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.704825 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:58 crc kubenswrapper[4885]: E0130 00:10:58.705165 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:10:59.20514157 +0000 UTC m=+145.796613318 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.705462 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:58 crc kubenswrapper[4885]: E0130 00:10:58.706540 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:10:59.206517406 +0000 UTC m=+145.797989194 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.704736 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-fvdhv" podStartSLOduration=119.704144553 podStartE2EDuration="1m59.704144553s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:58.70330716 +0000 UTC m=+145.294778908" watchObservedRunningTime="2026-01-30 00:10:58.704144553 +0000 UTC m=+145.295616301" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.711902 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-67ddr" event={"ID":"5f617943-c63f-4006-907c-dc2584eac526","Type":"ContainerStarted","Data":"0c447aba48a9cfe2b75af5a542775bbf7efbb2321075b06ff14abe10529bb6e4"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.718874 4885 generic.go:334] "Generic (PLEG): container finished" podID="8f8ad951-1f10-4883-8132-7afa6c3df767" containerID="b0f5bb5b06ebce48a8813f359d552eef5ba83782225c96842a1d3731e4c1fb27" exitCode=0 Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.718966 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" event={"ID":"8f8ad951-1f10-4883-8132-7afa6c3df767","Type":"ContainerDied","Data":"b0f5bb5b06ebce48a8813f359d552eef5ba83782225c96842a1d3731e4c1fb27"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.726078 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29495520-k6gh6" event={"ID":"4d551409-85f7-4c8f-8144-64ac0bb1f155","Type":"ContainerStarted","Data":"bcb7788121ce444bf23cda77c83213d3b870543b59533c4e3cd2bd77171e1a69"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.752606 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9wwjr" event={"ID":"12453952-578c-4ef3-97bd-eee389ec3d91","Type":"ContainerStarted","Data":"a94411c344e81e733e3e2062d16d4240d8f15678ed83aefb8809ebffe6fade0c"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.760884 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-hcx7w" event={"ID":"d806b98c-1d4e-42b2-9da3-4afc4ca6e255","Type":"ContainerStarted","Data":"ff491260cd1f9103b1d84570853ba35cc6cd12bb47c8720fe2757bd0d8e1a274"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.763035 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j" event={"ID":"14128854-7eae-4729-90c4-10370fde7337","Type":"ContainerStarted","Data":"5279076885deef25bb071bd3a54446ef4233ea42aa3798c2058098f75b2614a9"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.765736 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.767835 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-67ddr" podStartSLOduration=119.767817909 podStartE2EDuration="1m59.767817909s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:58.766627947 +0000 UTC m=+145.358099695" watchObservedRunningTime="2026-01-30 00:10:58.767817909 +0000 UTC m=+145.359289657" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.767982 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-24xxs" podStartSLOduration=119.767977063 podStartE2EDuration="1m59.767977063s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:58.73053722 +0000 UTC m=+145.322008968" watchObservedRunningTime="2026-01-30 00:10:58.767977063 +0000 UTC m=+145.359448811" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.774928 4885 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-fmh4j container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:5443/healthz\": dial tcp 10.217.0.35:5443: connect: connection refused" start-of-body= Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.775017 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j" podUID="14128854-7eae-4729-90c4-10370fde7337" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.35:5443/healthz\": dial tcp 10.217.0.35:5443: connect: connection refused" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.804585 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8r5gv" event={"ID":"f40ec490-af02-4935-bb20-3698b71fce88","Type":"ContainerStarted","Data":"8322a178cfac67e5681a3fba1683f003fbf5526378629f2192364007a28d92a8"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.804815 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8r5gv" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.806719 4885 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-8r5gv container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.17:8443/healthz\": dial tcp 10.217.0.17:8443: connect: connection refused" start-of-body= Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.806830 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8r5gv" podUID="f40ec490-af02-4935-bb20-3698b71fce88" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.17:8443/healthz\": dial tcp 10.217.0.17:8443: connect: connection refused" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.807265 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:58 crc kubenswrapper[4885]: E0130 00:10:58.807606 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:10:59.307572053 +0000 UTC m=+145.899043801 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.819087 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ngf2g" event={"ID":"6e019250-5bdc-4a0e-a1e9-ecaa947faa4e","Type":"ContainerStarted","Data":"624202005c21c34c1afb4b0a91e1cc4f08df45321983c16f75e3639a8085e545"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.836379 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m229k" event={"ID":"28808760-39b2-4b17-82b4-d3b6783ed31c","Type":"ContainerStarted","Data":"3cba2ce4a03c9472cc0b0a7ab53079fc1d8b4f4b806d11866330d98bae42f8b2"} Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.876256 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9wwjr" podStartSLOduration=119.876236743 podStartE2EDuration="1m59.876236743s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:58.831517865 +0000 UTC m=+145.422989613" watchObservedRunningTime="2026-01-30 00:10:58.876236743 +0000 UTC m=+145.467708491" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.879798 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j" podStartSLOduration=119.879789018 podStartE2EDuration="1m59.879789018s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:58.876214063 +0000 UTC m=+145.467685811" watchObservedRunningTime="2026-01-30 00:10:58.879789018 +0000 UTC m=+145.471260766" Jan 30 00:10:58 crc kubenswrapper[4885]: I0130 00:10:58.910913 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:58 crc kubenswrapper[4885]: E0130 00:10:58.913476 4885 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:10:59.41346212 +0000 UTC m=+146.004933868 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.012709 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:59 crc kubenswrapper[4885]: E0130 00:10:59.013242 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:10:59.513185962 +0000 UTC m=+146.104657720 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.114440 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:59 crc kubenswrapper[4885]: E0130 00:10:59.115221 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:10:59.615196865 +0000 UTC m=+146.206668613 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.138318 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-fvdhv" Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.143736 4885 patch_prober.go:28] interesting pod/router-default-5444994796-fvdhv container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.143835 4885 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fvdhv" podUID="a7093ca4-c09c-4031-ba6f-e7fc85890480" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.216950 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:59 crc kubenswrapper[4885]: E0130 00:10:59.217137 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:10:59.717109185 +0000 UTC m=+146.308580933 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.217736 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:59 crc kubenswrapper[4885]: E0130 00:10:59.218344 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:10:59.718319847 +0000 UTC m=+146.309791595 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.319441 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:59 crc kubenswrapper[4885]: E0130 00:10:59.319999 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:10:59.81998275 +0000 UTC m=+146.411454498 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.423477 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:59 crc kubenswrapper[4885]: E0130 00:10:59.424046 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:10:59.924029078 +0000 UTC m=+146.515500826 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.524679 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:59 crc kubenswrapper[4885]: E0130 00:10:59.525495 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:00.025477786 +0000 UTC m=+146.616949534 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.626691 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:59 crc kubenswrapper[4885]: E0130 00:10:59.627164 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:00.127149199 +0000 UTC m=+146.718620937 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.728211 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:59 crc kubenswrapper[4885]: E0130 00:10:59.728482 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:00.228449353 +0000 UTC m=+146.819921101 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.728988 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:59 crc kubenswrapper[4885]: E0130 00:10:59.729408 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:00.229393268 +0000 UTC m=+146.820865016 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.832124 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:10:59 crc kubenswrapper[4885]: E0130 00:10:59.832547 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:00.332527751 +0000 UTC m=+146.923999499 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.857740 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vjhs9" event={"ID":"9d8eb84b-e3c3-46ab-b9bb-29f63d57284e","Type":"ContainerStarted","Data":"0dfb2f384d6cb03474ffa1de075de1ac2973fae8d944223098252f889adc3a3c"} Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.857889 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vjhs9" Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.859831 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-jjb74" event={"ID":"7fdeda66-c9ae-4b65-8f49-c3f46a903e52","Type":"ContainerStarted","Data":"dd172051c7f4723117a0c96b8f87a050052a1d9e167bd9df22500908aa625dca"} Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.860894 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-jjb74" Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.862626 4885 patch_prober.go:28] interesting pod/console-operator-58897d9998-jjb74 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/readyz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.862720 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-jjb74" podUID="7fdeda66-c9ae-4b65-8f49-c3f46a903e52" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/readyz\": dial tcp 10.217.0.29:8443: connect: connection refused" Jan 
30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.863023 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kbjfj" event={"ID":"4b65245c-0e70-4a17-b739-9c08059b07dc","Type":"ContainerStarted","Data":"aa6d92fc12f0c90b1a3cca7fbdaee952d595b506a8df9e5cc7ecb6fa48ad7eab"} Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.865586 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-clmbp" event={"ID":"0203f99c-4bc4-4ebd-b17b-7f1f54f54315","Type":"ContainerStarted","Data":"5a0c7dbc2400849b4f9a1028b6e9cadc400ff3b045a774f4b59202102162c934"} Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.866354 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-clmbp" Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.868191 4885 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-clmbp container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" start-of-body= Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.868264 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-clmbp" podUID="0203f99c-4bc4-4ebd-b17b-7f1f54f54315" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.868842 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" event={"ID":"7ace7d0c-1b65-484b-9724-a03aded5ec7f","Type":"ContainerStarted","Data":"c664fc61eba9a572db986efc4f9534add1aee8e5717e5f07c6260633b79019a9"} Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.870200 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ngf2g" event={"ID":"6e019250-5bdc-4a0e-a1e9-ecaa947faa4e","Type":"ContainerStarted","Data":"d3924423038dc0a9b18fca948b843c56ef1a828542323f66c671f24a3a5f32e4"} Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.873706 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" event={"ID":"8f8ad951-1f10-4883-8132-7afa6c3df767","Type":"ContainerStarted","Data":"b640357442eedfe96099a145ac4d86a19d45d162895d2bd5e20ecc506bc12c3f"} Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.881064 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" event={"ID":"fcde4e44-9ff6-4539-84f3-a016080e13ce","Type":"ContainerStarted","Data":"f35050a0d39196512c3ae706ddb5487f773678feaf0cb4f4cc012d4b36d9c48a"} Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.884535 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-hcx7w" event={"ID":"d806b98c-1d4e-42b2-9da3-4afc4ca6e255","Type":"ContainerStarted","Data":"e58e6789472eb0c73856e574c2a7b3e77c17cec84bd2e395f49c5de4106c9bb2"} Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.889467 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-f5g22" 
event={"ID":"9c216d7c-42b5-45fb-a68d-8e38d622978f","Type":"ContainerStarted","Data":"9b76fe8fb6a49db3f0a1da944b7544f9867f578cbd8b185fc44334bc7406d258"} Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.890526 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-f5g22" Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.894461 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8r5gv" podStartSLOduration=120.89444865 podStartE2EDuration="2m0.89444865s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:58.899064915 +0000 UTC m=+145.490536663" watchObservedRunningTime="2026-01-30 00:10:59.89444865 +0000 UTC m=+146.485920398" Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.895971 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vjhs9" podStartSLOduration=120.895965921 podStartE2EDuration="2m0.895965921s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:59.894261265 +0000 UTC m=+146.485733013" watchObservedRunningTime="2026-01-30 00:10:59.895965921 +0000 UTC m=+146.487437669" Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.896195 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8r5gv" event={"ID":"f40ec490-af02-4935-bb20-3698b71fce88","Type":"ContainerStarted","Data":"133b2f63b0e490083892a4006273a6525162314ae96cd269e39945497cdb8269"} Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.897289 4885 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-8r5gv container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.17:8443/healthz\": dial tcp 10.217.0.17:8443: connect: connection refused" start-of-body= Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.897331 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8r5gv" podUID="f40ec490-af02-4935-bb20-3698b71fce88" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.17:8443/healthz\": dial tcp 10.217.0.17:8443: connect: connection refused" Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.898881 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-nm4dq" event={"ID":"1e053140-7618-4580-8899-a121dd6759f8","Type":"ContainerStarted","Data":"db3559437bc606bd134a3f685d002c453bd22d5cf7e0fa9525c6e9c6dd9d1101"} Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.907280 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j" event={"ID":"14128854-7eae-4729-90c4-10370fde7337","Type":"ContainerStarted","Data":"5dfe1435606ba95805df74f26ef5602f5e6b1994598fd11724487bb9c97f2d77"} Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.908810 4885 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-fmh4j container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe 
status=failure output="Get \"https://10.217.0.35:5443/healthz\": dial tcp 10.217.0.35:5443: connect: connection refused" start-of-body= Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.908893 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j" podUID="14128854-7eae-4729-90c4-10370fde7337" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.35:5443/healthz\": dial tcp 10.217.0.35:5443: connect: connection refused" Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.916732 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2bgb9" event={"ID":"c58668af-88e5-4058-9571-5ce0f3fd7e9f","Type":"ContainerStarted","Data":"e4bcddd59867f5659806a86ec20f12fa0ee4296f7d94c3534db0983c94348cc3"} Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.923022 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-6q2hj" event={"ID":"91f35d98-710b-4439-9c13-91f00f6646c7","Type":"ContainerStarted","Data":"9d7c428c41252efeb6bbd0cc46cfcda81c97b33d1b91b86cc04e869858172d42"} Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.927930 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" event={"ID":"a345d711-84e3-47c8-a255-f833dfaca7fa","Type":"ContainerStarted","Data":"6032deeddc43ceab6510c33b1dbef7ad2723f0954390a032a43a52c0192f1a8c"} Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.929255 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.939947 4885 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-mfxlj container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.940056 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" podUID="a345d711-84e3-47c8-a255-f833dfaca7fa" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.940149 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.940194 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.942632 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.947492 4885 patch_prober.go:28] interesting pod/apiserver-76f77b778f-xw5nc container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.5:8443/livez\": 
dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.947565 4885 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" podUID="fcde4e44-9ff6-4539-84f3-a016080e13ce" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.5:8443/livez\": dial tcp 10.217.0.5:8443: connect: connection refused" Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.947616 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9dggz" event={"ID":"fe084dfc-7335-4d94-8b7c-09637c52b19f","Type":"ContainerStarted","Data":"6205c179272a482af32f171a7ea1557a50c207eb1e7d5292cd1cc3d1dada1ab7"} Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.947689 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9dggz" event={"ID":"fe084dfc-7335-4d94-8b7c-09637c52b19f","Type":"ContainerStarted","Data":"f0381b2bdc17d811e955e8dedbf9e13e83af7d34c038000e2d847ef6c3584255"} Jan 30 00:10:59 crc kubenswrapper[4885]: E0130 00:10:59.951061 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:00.451044126 +0000 UTC m=+147.042515874 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.970072 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-jjb74" podStartSLOduration=120.970046045 podStartE2EDuration="2m0.970046045s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:59.935299964 +0000 UTC m=+146.526771712" watchObservedRunningTime="2026-01-30 00:10:59.970046045 +0000 UTC m=+146.561517793" Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.974960 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-fkgqw" event={"ID":"022b7871-8bf2-4432-9f33-d816fdd16fce","Type":"ContainerStarted","Data":"0281e7725973ea61a511521d4e439496f248c4312c8ff130ecf8f8aecb55e61a"} Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.980722 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-67ddr" event={"ID":"5f617943-c63f-4006-907c-dc2584eac526","Type":"ContainerStarted","Data":"bdd89cca41fa6a4787bf93a4969178f352e307819adfda9bfcc129f1134f113a"} Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.983889 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m229k" 
event={"ID":"28808760-39b2-4b17-82b4-d3b6783ed31c","Type":"ContainerStarted","Data":"f24b58a041668565426f9fe078f210ee035313e8c2e4f2787bdc50fd9c0d9a81"} Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.994349 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-4bhjw" podStartSLOduration=120.994325215 podStartE2EDuration="2m0.994325215s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:59.9929968 +0000 UTC m=+146.584468548" watchObservedRunningTime="2026-01-30 00:10:59.994325215 +0000 UTC m=+146.585796963" Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.995354 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" podStartSLOduration=120.995345812 podStartE2EDuration="2m0.995345812s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:10:59.969468879 +0000 UTC m=+146.560940627" watchObservedRunningTime="2026-01-30 00:10:59.995345812 +0000 UTC m=+146.586817560" Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.997515 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8vr8f" event={"ID":"b8533351-ed92-4278-89ed-8e3f31aecb20","Type":"ContainerStarted","Data":"4d11cb771c273a60f0f62d65d7fad2c76ca40ae80458848687f143b82bad02e1"} Jan 30 00:10:59 crc kubenswrapper[4885]: I0130 00:10:59.997584 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8vr8f" event={"ID":"b8533351-ed92-4278-89ed-8e3f31aecb20","Type":"ContainerStarted","Data":"705f04ef6ccd8d9043bb71c170a66be4459d59eb1b54483a3fceef9037b29750"} Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.004961 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8skch" event={"ID":"5af6d643-70f6-435e-b323-fac9aa37b466","Type":"ContainerStarted","Data":"38571fdc46f69cc36e8ff82e40ab632e1f30e9f9fe2392c276bcf20f4f0e92e6"} Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.005868 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8skch" Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.016254 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4xtnb" event={"ID":"ea6cf88e-bedf-4010-b737-0c93f2c4d4be","Type":"ContainerStarted","Data":"a60c7fd3906497e4873cb030f1722f30f088a9799bdb24c2e4e7bec73ddf9f85"} Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.016324 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4xtnb" event={"ID":"ea6cf88e-bedf-4010-b737-0c93f2c4d4be","Type":"ContainerStarted","Data":"d26d888a8078ea49fa0c3c35009e24038e3a99b42c536659626440a48abc7941"} Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.020813 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29495520-k6gh6" event={"ID":"4d551409-85f7-4c8f-8144-64ac0bb1f155","Type":"ContainerStarted","Data":"eaa77db4d472b2e2d938065454acf9077c60340c362781e5fc4057078c5c550b"} 
Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.021539 4885 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-2f8ww container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body=
Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.021612 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" podUID="5bd56c34-d51d-4f93-975b-d5c96f11b7f5" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused"
Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.021797 4885 patch_prober.go:28] interesting pod/downloads-7954f5f757-qtxcm container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body=
Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.021845 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-qtxcm" podUID="0eb0e632-fc50-4845-aa1b-4aab2bb7826b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused"
Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.025543 4885 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-p87kw container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.20:6443/healthz\": dial tcp 10.217.0.20:6443: connect: connection refused" start-of-body=
Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.025629 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" podUID="dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.20:6443/healthz\": dial tcp 10.217.0.20:6443: connect: connection refused"
Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.025711 4885 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-mhgj8 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body=
Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.025727 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" podUID="929bbe65-1902-453c-bebf-4e833b325ab1" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused"
Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.050571 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 00:11:00 crc kubenswrapper[4885]: E0130 00:11:00.052826 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:00.552749191 +0000 UTC m=+147.144220939 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.067828 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" podStartSLOduration=121.067806744 podStartE2EDuration="2m1.067806744s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:00.046302118 +0000 UTC m=+146.637773866" watchObservedRunningTime="2026-01-30 00:11:00.067806744 +0000 UTC m=+146.659278492"
Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.069063 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ngf2g" podStartSLOduration=121.069057438 podStartE2EDuration="2m1.069057438s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:00.066199091 +0000 UTC m=+146.657670839" watchObservedRunningTime="2026-01-30 00:11:00.069057438 +0000 UTC m=+146.660529186"
Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.091750 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-clmbp" podStartSLOduration=121.091721255 podStartE2EDuration="2m1.091721255s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:00.091406766 +0000 UTC m=+146.682878514" watchObservedRunningTime="2026-01-30 00:11:00.091721255 +0000 UTC m=+146.683193003"
Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.140253 4885 patch_prober.go:28] interesting pod/router-default-5444994796-fvdhv container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body=
Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.140336 4885 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fvdhv" podUID="a7093ca4-c09c-4031-ba6f-e7fc85890480" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused"
Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.144716 4885 patch_prober.go:28] interesting pod/machine-config-daemon-bmd5j container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.144761 4885 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.146945 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-hcx7w" podStartSLOduration=121.146930574 podStartE2EDuration="2m1.146930574s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:00.144350265 +0000 UTC m=+146.735822013" watchObservedRunningTime="2026-01-30 00:11:00.146930574 +0000 UTC m=+146.738402322"
Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.147231 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kbjfj" podStartSLOduration=121.147224442 podStartE2EDuration="2m1.147224442s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:00.122195251 +0000 UTC m=+146.713666999" watchObservedRunningTime="2026-01-30 00:11:00.147224442 +0000 UTC m=+146.738696190"
Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.153323 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm"
Jan 30 00:11:00 crc kubenswrapper[4885]: E0130 00:11:00.159687 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:00.659647874 +0000 UTC m=+147.251119632 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.176913 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-f5g22" podStartSLOduration=8.176889736 podStartE2EDuration="8.176889736s" podCreationTimestamp="2026-01-30 00:10:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:00.175872139 +0000 UTC m=+146.767343887" watchObservedRunningTime="2026-01-30 00:11:00.176889736 +0000 UTC m=+146.768361484" Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.201456 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8skch" podStartSLOduration=121.201433974 podStartE2EDuration="2m1.201433974s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:00.199790279 +0000 UTC m=+146.791262027" watchObservedRunningTime="2026-01-30 00:11:00.201433974 +0000 UTC m=+146.792905722" Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.228158 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-m229k" podStartSLOduration=121.228134139 podStartE2EDuration="2m1.228134139s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:00.226204077 +0000 UTC m=+146.817675825" watchObservedRunningTime="2026-01-30 00:11:00.228134139 +0000 UTC m=+146.819605887" Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.255424 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:00 crc kubenswrapper[4885]: E0130 00:11:00.255717 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:00.755667466 +0000 UTC m=+147.347139214 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.256920 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:00 crc kubenswrapper[4885]: E0130 00:11:00.257314 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:00.75730081 +0000 UTC m=+147.348772558 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.269438 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-fkgqw" podStartSLOduration=121.269418264 podStartE2EDuration="2m1.269418264s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:00.259886139 +0000 UTC m=+146.851357887" watchObservedRunningTime="2026-01-30 00:11:00.269418264 +0000 UTC m=+146.860890002" Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.284402 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.284680 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.288858 4885 patch_prober.go:28] interesting pod/apiserver-7bbb656c7d-wn7lm container/oauth-apiserver namespace/openshift-oauth-apiserver: Startup probe status=failure output="Get \"https://10.217.0.23:8443/livez\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body= Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.288926 4885 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" podUID="8f8ad951-1f10-4883-8132-7afa6c3df767" containerName="oauth-apiserver" probeResult="failure" output="Get \"https://10.217.0.23:8443/livez\": dial tcp 10.217.0.23:8443: connect: connection refused" Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.338425 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4xtnb" podStartSLOduration=121.338407123 podStartE2EDuration="2m1.338407123s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:00.334994772 +0000 UTC m=+146.926466520" watchObservedRunningTime="2026-01-30 00:11:00.338407123 +0000 UTC m=+146.929878871" Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.358437 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:00 crc kubenswrapper[4885]: E0130 00:11:00.359063 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:00.859044355 +0000 UTC m=+147.450516103 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.457268 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-6q2hj" podStartSLOduration=8.457245416 podStartE2EDuration="8.457245416s" podCreationTimestamp="2026-01-30 00:10:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:00.456125656 +0000 UTC m=+147.047597404" watchObservedRunningTime="2026-01-30 00:11:00.457245416 +0000 UTC m=+147.048717164" Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.461069 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:00 crc kubenswrapper[4885]: E0130 00:11:00.461447 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:00.961433109 +0000 UTC m=+147.552904857 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.534690 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-d4zrj" podStartSLOduration=121.53466737 podStartE2EDuration="2m1.53466737s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:00.533690505 +0000 UTC m=+147.125162253" watchObservedRunningTime="2026-01-30 00:11:00.53466737 +0000 UTC m=+147.126139118" Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.561760 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:00 crc kubenswrapper[4885]: E0130 00:11:00.561979 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.061921451 +0000 UTC m=+147.653393199 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.562079 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:00 crc kubenswrapper[4885]: E0130 00:11:00.562528 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.062510696 +0000 UTC m=+147.653982444 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.584839 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29495520-k6gh6" podStartSLOduration=121.584812894 podStartE2EDuration="2m1.584812894s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:00.584266609 +0000 UTC m=+147.175738357" watchObservedRunningTime="2026-01-30 00:11:00.584812894 +0000 UTC m=+147.176284642" Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.619297 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" podStartSLOduration=121.619279607 podStartE2EDuration="2m1.619279607s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:00.618014333 +0000 UTC m=+147.209486081" watchObservedRunningTime="2026-01-30 00:11:00.619279607 +0000 UTC m=+147.210751355" Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.653275 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-2bgb9" podStartSLOduration=121.653250427 podStartE2EDuration="2m1.653250427s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:00.652193879 +0000 UTC m=+147.243665627" watchObservedRunningTime="2026-01-30 00:11:00.653250427 +0000 UTC m=+147.244722175" Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.663204 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:00 crc kubenswrapper[4885]: E0130 00:11:00.663439 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.163394069 +0000 UTC m=+147.754865837 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.663591 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:00 crc kubenswrapper[4885]: E0130 00:11:00.664125 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.164107948 +0000 UTC m=+147.755579696 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.820860 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:00 crc kubenswrapper[4885]: E0130 00:11:00.821379 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.321325129 +0000 UTC m=+147.912796887 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.821716 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:00 crc kubenswrapper[4885]: E0130 00:11:00.822204 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.322193173 +0000 UTC m=+147.913664921 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.831398 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-nm4dq" podStartSLOduration=121.831364649 podStartE2EDuration="2m1.831364649s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:00.69366058 +0000 UTC m=+147.285132328" watchObservedRunningTime="2026-01-30 00:11:00.831364649 +0000 UTC m=+147.422836397" Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.871433 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9dggz" podStartSLOduration=121.871409411 podStartE2EDuration="2m1.871409411s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:00.840574505 +0000 UTC m=+147.432046243" watchObservedRunningTime="2026-01-30 00:11:00.871409411 +0000 UTC m=+147.462881159" Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.872845 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-8vr8f" podStartSLOduration=121.87283946 podStartE2EDuration="2m1.87283946s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:00.87099706 +0000 UTC m=+147.462468818" watchObservedRunningTime="2026-01-30 00:11:00.87283946 +0000 UTC m=+147.464311198" Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 
00:11:00.923455 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:00 crc kubenswrapper[4885]: E0130 00:11:00.923712 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.423675861 +0000 UTC m=+148.015147609 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:00 crc kubenswrapper[4885]: I0130 00:11:00.923970 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:00 crc kubenswrapper[4885]: E0130 00:11:00.924534 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.424524854 +0000 UTC m=+148.015996602 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.029545 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:01 crc kubenswrapper[4885]: E0130 00:11:01.029812 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.529779044 +0000 UTC m=+148.121250792 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.029931 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:01 crc kubenswrapper[4885]: E0130 00:11:01.030636 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.530612106 +0000 UTC m=+148.122083934 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.037313 4885 patch_prober.go:28] interesting pod/console-operator-58897d9998-jjb74 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/readyz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.037401 4885 patch_prober.go:28] interesting pod/downloads-7954f5f757-qtxcm container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.037399 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-jjb74" podUID="7fdeda66-c9ae-4b65-8f49-c3f46a903e52" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/readyz\": dial tcp 10.217.0.29:8443: connect: connection refused" Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.037443 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-qtxcm" podUID="0eb0e632-fc50-4845-aa1b-4aab2bb7826b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.037407 4885 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-mfxlj container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Jan 30 00:11:01 crc 
kubenswrapper[4885]: I0130 00:11:01.037516 4885 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-fmh4j container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:5443/healthz\": dial tcp 10.217.0.35:5443: connect: connection refused" start-of-body= Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.037517 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" podUID="a345d711-84e3-47c8-a255-f833dfaca7fa" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.037324 4885 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-8r5gv container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.17:8443/healthz\": dial tcp 10.217.0.17:8443: connect: connection refused" start-of-body= Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.037594 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8r5gv" podUID="f40ec490-af02-4935-bb20-3698b71fce88" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.17:8443/healthz\": dial tcp 10.217.0.17:8443: connect: connection refused" Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.037473 4885 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-clmbp container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" start-of-body= Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.037604 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j" podUID="14128854-7eae-4729-90c4-10370fde7337" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.35:5443/healthz\": dial tcp 10.217.0.35:5443: connect: connection refused" Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.037626 4885 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-2f8ww container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.037647 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-clmbp" podUID="0203f99c-4bc4-4ebd-b17b-7f1f54f54315" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.037688 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" podUID="5bd56c34-d51d-4f93-975b-d5c96f11b7f5" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.131107 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:01 crc kubenswrapper[4885]: E0130 00:11:01.131291 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.631260732 +0000 UTC m=+148.222732470 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.133113 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:01 crc kubenswrapper[4885]: E0130 00:11:01.138914 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.638898817 +0000 UTC m=+148.230370565 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.148063 4885 patch_prober.go:28] interesting pod/router-default-5444994796-fvdhv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 30 00:11:01 crc kubenswrapper[4885]: [-]has-synced failed: reason withheld Jan 30 00:11:01 crc kubenswrapper[4885]: [+]process-running ok Jan 30 00:11:01 crc kubenswrapper[4885]: healthz check failed Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.148149 4885 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fvdhv" podUID="a7093ca4-c09c-4031-ba6f-e7fc85890480" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.234911 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:01 crc kubenswrapper[4885]: E0130 00:11:01.235153 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.735102894 +0000 UTC m=+148.326574642 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.235513 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:01 crc kubenswrapper[4885]: E0130 00:11:01.235931 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.735923586 +0000 UTC m=+148.327395334 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.336331 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:01 crc kubenswrapper[4885]: E0130 00:11:01.336555 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.836519441 +0000 UTC m=+148.427991179 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.336903 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:01 crc kubenswrapper[4885]: E0130 00:11:01.337235 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.837222359 +0000 UTC m=+148.428694107 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.414587 4885 csr.go:261] certificate signing request csr-stvkh is approved, waiting to be issued Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.421227 4885 csr.go:257] certificate signing request csr-stvkh is issued Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.437934 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:01 crc kubenswrapper[4885]: E0130 00:11:01.438176 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.938137143 +0000 UTC m=+148.529608881 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.438386 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:01 crc kubenswrapper[4885]: E0130 00:11:01.438977 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:01.938954225 +0000 UTC m=+148.530425963 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.444843 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.539514 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:01 crc kubenswrapper[4885]: E0130 00:11:01.539981 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:02.039942221 +0000 UTC m=+148.631413969 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.540499 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:01 crc kubenswrapper[4885]: E0130 00:11:01.540976 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:02.040960848 +0000 UTC m=+148.632432596 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.641364 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:01 crc kubenswrapper[4885]: E0130 00:11:01.641645 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:02.141625134 +0000 UTC m=+148.733096882 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.641713 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:01 crc kubenswrapper[4885]: E0130 00:11:01.642060 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:02.142050476 +0000 UTC m=+148.733522224 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.742890 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 00:11:01 crc kubenswrapper[4885]: E0130 00:11:01.743080 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:02.243044501 +0000 UTC m=+148.834516249 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.743272 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm"
Jan 30 00:11:01 crc kubenswrapper[4885]: E0130 00:11:01.743693 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:02.243681469 +0000 UTC m=+148.835153407 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.844925 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 00:11:01 crc kubenswrapper[4885]: E0130 00:11:01.845204 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:02.345163867 +0000 UTC m=+148.936635615 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.845334 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm"
Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.845475 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.845741 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 00:11:01 crc kubenswrapper[4885]: E0130 00:11:01.845794 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:02.345755663 +0000 UTC m=+148.937227581 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.856074 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.868787 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.881960 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.908218 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 30 00:11:01 crc kubenswrapper[4885]: I0130 00:11:01.946524 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 00:11:01 crc kubenswrapper[4885]: E0130 00:11:01.946832 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:02.4468121 +0000 UTC m=+149.038283848 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.049367 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.049421 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm"
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.049526 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 00:11:02 crc kubenswrapper[4885]: E0130 00:11:02.050597 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:02.55058176 +0000 UTC m=+149.142053508 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.054964 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.056000 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.116591 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" event={"ID":"94a6782c-15c8-43c6-a4f2-6c297ba52df0","Type":"ContainerStarted","Data":"7bff9dbf6e812a61bf6d4aefe99505b9dbbf4ca462ea847d63c22f8b80356a18"}
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.136398 4885 generic.go:334] "Generic (PLEG): container finished" podID="4d551409-85f7-4c8f-8144-64ac0bb1f155" containerID="eaa77db4d472b2e2d938065454acf9077c60340c362781e5fc4057078c5c550b" exitCode=0
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.137536 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29495520-k6gh6" event={"ID":"4d551409-85f7-4c8f-8144-64ac0bb1f155","Type":"ContainerDied","Data":"eaa77db4d472b2e2d938065454acf9077c60340c362781e5fc4057078c5c550b"}
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.138154 4885 patch_prober.go:28] interesting pod/console-operator-58897d9998-jjb74 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/readyz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body=
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.138256 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-jjb74" podUID="7fdeda66-c9ae-4b65-8f49-c3f46a903e52" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/readyz\": dial tcp 10.217.0.29:8443: connect: connection refused"
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.141061 4885 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-8skch container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body=
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.141110 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8skch" podUID="5af6d643-70f6-435e-b323-fac9aa37b466" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused"
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.142222 4885 patch_prober.go:28] interesting pod/router-default-5444994796-fvdhv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 30 00:11:02 crc kubenswrapper[4885]: [-]has-synced failed: reason withheld
Jan 30 00:11:02 crc kubenswrapper[4885]: [+]process-running ok
Jan 30 00:11:02 crc kubenswrapper[4885]: healthz check failed
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.142252 4885 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fvdhv" podUID="a7093ca4-c09c-4031-ba6f-e7fc85890480" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.151136 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 00:11:02 crc kubenswrapper[4885]: E0130 00:11:02.152456 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:02.652426708 +0000 UTC m=+149.243898456 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.169205 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.253544 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm"
Jan 30 00:11:02 crc kubenswrapper[4885]: E0130 00:11:02.255128 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:02.755114169 +0000 UTC m=+149.346585917 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.354595 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 00:11:02 crc kubenswrapper[4885]: E0130 00:11:02.354953 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:02.854920963 +0000 UTC m=+149.446400861 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.422623 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-01-30 00:06:01 +0000 UTC, rotation deadline is 2026-11-12 10:59:31.504223499 +0000 UTC
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.422676 4885 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6874h48m29.081551951s for next certificate rotation
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.457154 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm"
Jan 30 00:11:02 crc kubenswrapper[4885]: E0130 00:11:02.457715 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:02.957687665 +0000 UTC m=+149.549159413 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.558349 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 00:11:02 crc kubenswrapper[4885]: E0130 00:11:02.558600 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:03.058567068 +0000 UTC m=+149.650038816 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.659252 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm"
Jan 30 00:11:02 crc kubenswrapper[4885]: E0130 00:11:02.659842 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:03.159821941 +0000 UTC m=+149.751293689 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.760989 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 00:11:02 crc kubenswrapper[4885]: E0130 00:11:02.772097 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:03.272043767 +0000 UTC m=+149.863515515 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.863716 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm"
Jan 30 00:11:02 crc kubenswrapper[4885]: E0130 00:11:02.864204 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:03.364187765 +0000 UTC m=+149.955659513 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.969277 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 00:11:02 crc kubenswrapper[4885]: E0130 00:11:02.969452 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:03.469415064 +0000 UTC m=+150.060886812 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:02 crc kubenswrapper[4885]: I0130 00:11:02.969590 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm"
Jan 30 00:11:02 crc kubenswrapper[4885]: E0130 00:11:02.970014 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:03.46999601 +0000 UTC m=+150.061467758 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:03 crc kubenswrapper[4885]: W0130 00:11:03.060536 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-db92a03c8d979e0c3243ae2b1b06d577594d7527f473e03f476e04e6b4146d06 WatchSource:0}: Error finding container db92a03c8d979e0c3243ae2b1b06d577594d7527f473e03f476e04e6b4146d06: Status 404 returned error can't find the container with id db92a03c8d979e0c3243ae2b1b06d577594d7527f473e03f476e04e6b4146d06
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.071594 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 00:11:03 crc kubenswrapper[4885]: E0130 00:11:03.072037 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:03.572004893 +0000 UTC m=+150.163476631 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.072140 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm"
Jan 30 00:11:03 crc kubenswrapper[4885]: E0130 00:11:03.072569 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:03.572558917 +0000 UTC m=+150.164030665 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.143406 4885 patch_prober.go:28] interesting pod/router-default-5444994796-fvdhv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 30 00:11:03 crc kubenswrapper[4885]: [-]has-synced failed: reason withheld
Jan 30 00:11:03 crc kubenswrapper[4885]: [+]process-running ok
Jan 30 00:11:03 crc kubenswrapper[4885]: healthz check failed
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.143477 4885 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fvdhv" podUID="a7093ca4-c09c-4031-ba6f-e7fc85890480" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.143990 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"0bc632b36d55c72cde0132e8c4abef184a99dc1bf776db88dd28cc35e1b5fbd2"}
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.145824 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"3324ff579ed319bfc5a867de4baeb37be6b2b00738995ec2677114282e9b046f"}
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.147176 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"db92a03c8d979e0c3243ae2b1b06d577594d7527f473e03f476e04e6b4146d06"}
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.177541 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 00:11:03 crc kubenswrapper[4885]: E0130 00:11:03.177929 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:03.677912349 +0000 UTC m=+150.269384097 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.279454 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm"
Jan 30 00:11:03 crc kubenswrapper[4885]: E0130 00:11:03.280002 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:03.779979994 +0000 UTC m=+150.371451812 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.381349 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 00:11:03 crc kubenswrapper[4885]: E0130 00:11:03.381597 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:03.881552545 +0000 UTC m=+150.473024293 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.381726 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm"
Jan 30 00:11:03 crc kubenswrapper[4885]: E0130 00:11:03.382224 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:03.882204642 +0000 UTC m=+150.473676390 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.471793 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.472642 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.483040 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 00:11:03 crc kubenswrapper[4885]: E0130 00:11:03.483263 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:03.983195317 +0000 UTC m=+150.574667065 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.483442 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm"
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.483515 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2fa78fe3-5bb2-4645-a1e3-194d7a060741-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2fa78fe3-5bb2-4645-a1e3-194d7a060741\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.483573 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2fa78fe3-5bb2-4645-a1e3-194d7a060741-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2fa78fe3-5bb2-4645-a1e3-194d7a060741\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 30 00:11:03 crc kubenswrapper[4885]: E0130 00:11:03.483919 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:03.983909777 +0000 UTC m=+150.575381525 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.491436 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.491471 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.502889 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.588466 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 00:11:03 crc kubenswrapper[4885]: E0130 00:11:03.589156 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:04.089136846 +0000 UTC m=+150.680608594 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.589232 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm"
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.589279 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2fa78fe3-5bb2-4645-a1e3-194d7a060741-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2fa78fe3-5bb2-4645-a1e3-194d7a060741\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.589321 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2fa78fe3-5bb2-4645-a1e3-194d7a060741-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2fa78fe3-5bb2-4645-a1e3-194d7a060741\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 30 00:11:03 crc kubenswrapper[4885]: E0130 00:11:03.589947 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:04.089939228 +0000 UTC m=+150.681410976 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.590139 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2fa78fe3-5bb2-4645-a1e3-194d7a060741-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2fa78fe3-5bb2-4645-a1e3-194d7a060741\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.660957 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2fa78fe3-5bb2-4645-a1e3-194d7a060741-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2fa78fe3-5bb2-4645-a1e3-194d7a060741\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.696466 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 00:11:03 crc kubenswrapper[4885]: E0130 00:11:03.696924 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:04.196906983 +0000 UTC m=+150.788378721 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.794551 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.800253 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm"
Jan 30 00:11:03 crc kubenswrapper[4885]: E0130 00:11:03.800732 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:04.300717524 +0000 UTC m=+150.892189272 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.931889 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 00:11:03 crc kubenswrapper[4885]: E0130 00:11:03.932455 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:04.432435043 +0000 UTC m=+151.023906801 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.940082 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-v5294"]
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.949200 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v5294"
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.965740 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Jan 30 00:11:03 crc kubenswrapper[4885]: I0130 00:11:03.995823 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-v5294"]
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.051304 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm"
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.051376 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1ac97f9-b076-40c9-80fc-a2f6111d313b-utilities\") pod \"certified-operators-v5294\" (UID: \"c1ac97f9-b076-40c9-80fc-a2f6111d313b\") " pod="openshift-marketplace/certified-operators-v5294"
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.051452 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nz8hz\" (UniqueName: \"kubernetes.io/projected/c1ac97f9-b076-40c9-80fc-a2f6111d313b-kube-api-access-nz8hz\") pod \"certified-operators-v5294\" (UID: \"c1ac97f9-b076-40c9-80fc-a2f6111d313b\") " pod="openshift-marketplace/certified-operators-v5294"
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.051478 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1ac97f9-b076-40c9-80fc-a2f6111d313b-catalog-content\") pod \"certified-operators-v5294\" (UID: \"c1ac97f9-b076-40c9-80fc-a2f6111d313b\") " pod="openshift-marketplace/certified-operators-v5294"
Jan 30 00:11:04 crc kubenswrapper[4885]: E0130 00:11:04.053532 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:04.553517736 +0000 UTC m=+151.144989484 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.066710 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7fh97"]
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.067812 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7fh97"
Jan 30 00:11:04 crc kubenswrapper[4885]: W0130 00:11:04.082007 4885 reflector.go:561] object-"openshift-marketplace"/"community-operators-dockercfg-dmngl": failed to list *v1.Secret: secrets "community-operators-dockercfg-dmngl" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-marketplace": no relationship found between node 'crc' and this object
Jan 30 00:11:04 crc kubenswrapper[4885]: E0130 00:11:04.082068 4885 reflector.go:158] "Unhandled Error" err="object-\"openshift-marketplace\"/\"community-operators-dockercfg-dmngl\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"community-operators-dockercfg-dmngl\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-marketplace\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.162837 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.162876 4885 patch_prober.go:28] interesting pod/router-default-5444994796-fvdhv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 30 00:11:04 crc kubenswrapper[4885]: [-]has-synced failed: reason withheld
Jan 30 00:11:04 crc kubenswrapper[4885]: [+]process-running ok
Jan 30 00:11:04 crc kubenswrapper[4885]: healthz check failed
Jan 30 00:11:04 crc kubenswrapper[4885]: E0130 00:11:04.163025 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:04.663003639 +0000 UTC m=+151.254475377 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.164753 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkn5v\" (UniqueName: \"kubernetes.io/projected/fdd3cba5-cf61-40cd-8c88-d289887fbf8a-kube-api-access-mkn5v\") pod \"community-operators-7fh97\" (UID: \"fdd3cba5-cf61-40cd-8c88-d289887fbf8a\") " pod="openshift-marketplace/community-operators-7fh97"
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.165191 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm"
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.165295 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1ac97f9-b076-40c9-80fc-a2f6111d313b-utilities\") pod \"certified-operators-v5294\" (UID: \"c1ac97f9-b076-40c9-80fc-a2f6111d313b\") " pod="openshift-marketplace/certified-operators-v5294"
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.165390 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdd3cba5-cf61-40cd-8c88-d289887fbf8a-utilities\") pod \"community-operators-7fh97\" (UID: \"fdd3cba5-cf61-40cd-8c88-d289887fbf8a\") " pod="openshift-marketplace/community-operators-7fh97"
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.165481 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nz8hz\" (UniqueName: \"kubernetes.io/projected/c1ac97f9-b076-40c9-80fc-a2f6111d313b-kube-api-access-nz8hz\") pod \"certified-operators-v5294\" (UID: \"c1ac97f9-b076-40c9-80fc-a2f6111d313b\") " pod="openshift-marketplace/certified-operators-v5294"
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.165558 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1ac97f9-b076-40c9-80fc-a2f6111d313b-catalog-content\") pod \"certified-operators-v5294\" (UID: \"c1ac97f9-b076-40c9-80fc-a2f6111d313b\") " pod="openshift-marketplace/certified-operators-v5294"
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.165635 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdd3cba5-cf61-40cd-8c88-d289887fbf8a-catalog-content\") pod \"community-operators-7fh97\" (UID: \"fdd3cba5-cf61-40cd-8c88-d289887fbf8a\") " pod="openshift-marketplace/community-operators-7fh97"
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.164859 4885 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fvdhv" podUID="a7093ca4-c09c-4031-ba6f-e7fc85890480" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 30 00:11:04 crc kubenswrapper[4885]: E0130 00:11:04.166145 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:04.666130813 +0000 UTC m=+151.257602561 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.166610 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1ac97f9-b076-40c9-80fc-a2f6111d313b-utilities\") pod \"certified-operators-v5294\" (UID: \"c1ac97f9-b076-40c9-80fc-a2f6111d313b\") " pod="openshift-marketplace/certified-operators-v5294"
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.166969 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1ac97f9-b076-40c9-80fc-a2f6111d313b-catalog-content\") pod \"certified-operators-v5294\" (UID: \"c1ac97f9-b076-40c9-80fc-a2f6111d313b\") " pod="openshift-marketplace/certified-operators-v5294"
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.183842 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8skch"
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.220784 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nz8hz\" (UniqueName: \"kubernetes.io/projected/c1ac97f9-b076-40c9-80fc-a2f6111d313b-kube-api-access-nz8hz\") pod \"certified-operators-v5294\" (UID: \"c1ac97f9-b076-40c9-80fc-a2f6111d313b\") " pod="openshift-marketplace/certified-operators-v5294"
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.233528 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"8e74e61de91d005acdb4d5f1c340f5b8ab2856b93b3a4f3e0cf36bdc7129fbbd"}
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.235442 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7fh97"]
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.267710 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bw6zc"]
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.267730 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 30 00:11:04 crc kubenswrapper[4885]: E0130 00:11:04.267841 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:04.767818987 +0000 UTC m=+151.359290735 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.280397 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkn5v\" (UniqueName: \"kubernetes.io/projected/fdd3cba5-cf61-40cd-8c88-d289887fbf8a-kube-api-access-mkn5v\") pod \"community-operators-7fh97\" (UID: \"fdd3cba5-cf61-40cd-8c88-d289887fbf8a\") " pod="openshift-marketplace/community-operators-7fh97"
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.280466 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm"
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.280564 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdd3cba5-cf61-40cd-8c88-d289887fbf8a-utilities\") pod \"community-operators-7fh97\" (UID: \"fdd3cba5-cf61-40cd-8c88-d289887fbf8a\") " pod="openshift-marketplace/community-operators-7fh97"
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.280639 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdd3cba5-cf61-40cd-8c88-d289887fbf8a-catalog-content\") pod \"community-operators-7fh97\" (UID: \"fdd3cba5-cf61-40cd-8c88-d289887fbf8a\") " pod="openshift-marketplace/community-operators-7fh97"
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.281174 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdd3cba5-cf61-40cd-8c88-d289887fbf8a-catalog-content\") pod \"community-operators-7fh97\" (UID: \"fdd3cba5-cf61-40cd-8c88-d289887fbf8a\") " pod="openshift-marketplace/community-operators-7fh97"
Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.269675 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29495520-k6gh6"
Jan 30 00:11:04 crc kubenswrapper[4885]: E0130 00:11:04.281830 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:04.781806022 +0000 UTC m=+151.373277840 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.282528 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdd3cba5-cf61-40cd-8c88-d289887fbf8a-utilities\") pod \"community-operators-7fh97\" (UID: \"fdd3cba5-cf61-40cd-8c88-d289887fbf8a\") " pod="openshift-marketplace/community-operators-7fh97" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.290367 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.290402 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"581387efec24b7ab73fef0494eb88ba6d86d3904094caf50780e67053d800234"} Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.290509 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bw6zc" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.302280 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v5294" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.334070 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bw6zc"] Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.382455 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4d551409-85f7-4c8f-8144-64ac0bb1f155-config-volume\") pod \"4d551409-85f7-4c8f-8144-64ac0bb1f155\" (UID: \"4d551409-85f7-4c8f-8144-64ac0bb1f155\") " Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.382910 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4d551409-85f7-4c8f-8144-64ac0bb1f155-secret-volume\") pod \"4d551409-85f7-4c8f-8144-64ac0bb1f155\" (UID: \"4d551409-85f7-4c8f-8144-64ac0bb1f155\") " Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.383026 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.383069 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4fbbj\" (UniqueName: \"kubernetes.io/projected/4d551409-85f7-4c8f-8144-64ac0bb1f155-kube-api-access-4fbbj\") pod \"4d551409-85f7-4c8f-8144-64ac0bb1f155\" (UID: \"4d551409-85f7-4c8f-8144-64ac0bb1f155\") " Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.383339 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9af08248-ac20-4708-8753-bd2d97ad46a6-catalog-content\") pod \"certified-operators-bw6zc\" (UID: \"9af08248-ac20-4708-8753-bd2d97ad46a6\") " pod="openshift-marketplace/certified-operators-bw6zc" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.383367 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktsg6\" (UniqueName: \"kubernetes.io/projected/9af08248-ac20-4708-8753-bd2d97ad46a6-kube-api-access-ktsg6\") pod \"certified-operators-bw6zc\" (UID: \"9af08248-ac20-4708-8753-bd2d97ad46a6\") " pod="openshift-marketplace/certified-operators-bw6zc" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.383398 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9af08248-ac20-4708-8753-bd2d97ad46a6-utilities\") pod \"certified-operators-bw6zc\" (UID: \"9af08248-ac20-4708-8753-bd2d97ad46a6\") " pod="openshift-marketplace/certified-operators-bw6zc" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.384499 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d551409-85f7-4c8f-8144-64ac0bb1f155-config-volume" (OuterVolumeSpecName: "config-volume") pod "4d551409-85f7-4c8f-8144-64ac0bb1f155" (UID: "4d551409-85f7-4c8f-8144-64ac0bb1f155"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:11:04 crc kubenswrapper[4885]: E0130 00:11:04.385719 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:04.885701295 +0000 UTC m=+151.477173043 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.393408 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkn5v\" (UniqueName: \"kubernetes.io/projected/fdd3cba5-cf61-40cd-8c88-d289887fbf8a-kube-api-access-mkn5v\") pod \"community-operators-7fh97\" (UID: \"fdd3cba5-cf61-40cd-8c88-d289887fbf8a\") " pod="openshift-marketplace/community-operators-7fh97" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.399838 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d551409-85f7-4c8f-8144-64ac0bb1f155-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4d551409-85f7-4c8f-8144-64ac0bb1f155" (UID: "4d551409-85f7-4c8f-8144-64ac0bb1f155"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.423091 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d551409-85f7-4c8f-8144-64ac0bb1f155-kube-api-access-4fbbj" (OuterVolumeSpecName: "kube-api-access-4fbbj") pod "4d551409-85f7-4c8f-8144-64ac0bb1f155" (UID: "4d551409-85f7-4c8f-8144-64ac0bb1f155"). InnerVolumeSpecName "kube-api-access-4fbbj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.490018 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9af08248-ac20-4708-8753-bd2d97ad46a6-utilities\") pod \"certified-operators-bw6zc\" (UID: \"9af08248-ac20-4708-8753-bd2d97ad46a6\") " pod="openshift-marketplace/certified-operators-bw6zc" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.490102 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.490210 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9af08248-ac20-4708-8753-bd2d97ad46a6-catalog-content\") pod \"certified-operators-bw6zc\" (UID: \"9af08248-ac20-4708-8753-bd2d97ad46a6\") " pod="openshift-marketplace/certified-operators-bw6zc" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.490251 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktsg6\" (UniqueName: \"kubernetes.io/projected/9af08248-ac20-4708-8753-bd2d97ad46a6-kube-api-access-ktsg6\") pod \"certified-operators-bw6zc\" (UID: \"9af08248-ac20-4708-8753-bd2d97ad46a6\") " pod="openshift-marketplace/certified-operators-bw6zc" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.490340 4885 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4d551409-85f7-4c8f-8144-64ac0bb1f155-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.490358 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4fbbj\" (UniqueName: \"kubernetes.io/projected/4d551409-85f7-4c8f-8144-64ac0bb1f155-kube-api-access-4fbbj\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.490373 4885 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4d551409-85f7-4c8f-8144-64ac0bb1f155-config-volume\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.491234 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9af08248-ac20-4708-8753-bd2d97ad46a6-utilities\") pod \"certified-operators-bw6zc\" (UID: \"9af08248-ac20-4708-8753-bd2d97ad46a6\") " pod="openshift-marketplace/certified-operators-bw6zc" Jan 30 00:11:04 crc kubenswrapper[4885]: E0130 00:11:04.491640 4885 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:04.991624392 +0000 UTC m=+151.583096150 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.492176 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9af08248-ac20-4708-8753-bd2d97ad46a6-catalog-content\") pod \"certified-operators-bw6zc\" (UID: \"9af08248-ac20-4708-8753-bd2d97ad46a6\") " pod="openshift-marketplace/certified-operators-bw6zc" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.542510 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jmbh5"] Jan 30 00:11:04 crc kubenswrapper[4885]: E0130 00:11:04.542961 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d551409-85f7-4c8f-8144-64ac0bb1f155" containerName="collect-profiles" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.542979 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d551409-85f7-4c8f-8144-64ac0bb1f155" containerName="collect-profiles" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.544246 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktsg6\" (UniqueName: \"kubernetes.io/projected/9af08248-ac20-4708-8753-bd2d97ad46a6-kube-api-access-ktsg6\") pod \"certified-operators-bw6zc\" (UID: \"9af08248-ac20-4708-8753-bd2d97ad46a6\") " pod="openshift-marketplace/certified-operators-bw6zc" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.573177 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d551409-85f7-4c8f-8144-64ac0bb1f155" containerName="collect-profiles" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.578531 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jmbh5"] Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.578676 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jmbh5" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.592348 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:04 crc kubenswrapper[4885]: E0130 00:11:04.592993 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:05.092970418 +0000 UTC m=+151.684442176 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.635155 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bw6zc" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.693717 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dffca359-3f77-47e0-999e-ec7b5d72176d-catalog-content\") pod \"community-operators-jmbh5\" (UID: \"dffca359-3f77-47e0-999e-ec7b5d72176d\") " pod="openshift-marketplace/community-operators-jmbh5" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.693797 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7rn9\" (UniqueName: \"kubernetes.io/projected/dffca359-3f77-47e0-999e-ec7b5d72176d-kube-api-access-q7rn9\") pod \"community-operators-jmbh5\" (UID: \"dffca359-3f77-47e0-999e-ec7b5d72176d\") " pod="openshift-marketplace/community-operators-jmbh5" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.693865 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.693911 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dffca359-3f77-47e0-999e-ec7b5d72176d-utilities\") pod \"community-operators-jmbh5\" (UID: \"dffca359-3f77-47e0-999e-ec7b5d72176d\") " pod="openshift-marketplace/community-operators-jmbh5" Jan 30 00:11:04 crc kubenswrapper[4885]: E0130 00:11:04.694274 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:05.194256531 +0000 UTC m=+151.785728279 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.794901 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.795448 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dffca359-3f77-47e0-999e-ec7b5d72176d-utilities\") pod \"community-operators-jmbh5\" (UID: \"dffca359-3f77-47e0-999e-ec7b5d72176d\") " pod="openshift-marketplace/community-operators-jmbh5" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.795504 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dffca359-3f77-47e0-999e-ec7b5d72176d-catalog-content\") pod \"community-operators-jmbh5\" (UID: \"dffca359-3f77-47e0-999e-ec7b5d72176d\") " pod="openshift-marketplace/community-operators-jmbh5" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.795537 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7rn9\" (UniqueName: \"kubernetes.io/projected/dffca359-3f77-47e0-999e-ec7b5d72176d-kube-api-access-q7rn9\") pod \"community-operators-jmbh5\" (UID: \"dffca359-3f77-47e0-999e-ec7b5d72176d\") " pod="openshift-marketplace/community-operators-jmbh5" Jan 30 00:11:04 crc kubenswrapper[4885]: E0130 00:11:04.795963 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:05.295940164 +0000 UTC m=+151.887411912 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.796446 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dffca359-3f77-47e0-999e-ec7b5d72176d-utilities\") pod \"community-operators-jmbh5\" (UID: \"dffca359-3f77-47e0-999e-ec7b5d72176d\") " pod="openshift-marketplace/community-operators-jmbh5" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.796568 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dffca359-3f77-47e0-999e-ec7b5d72176d-catalog-content\") pod \"community-operators-jmbh5\" (UID: \"dffca359-3f77-47e0-999e-ec7b5d72176d\") " pod="openshift-marketplace/community-operators-jmbh5" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.861592 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7rn9\" (UniqueName: \"kubernetes.io/projected/dffca359-3f77-47e0-999e-ec7b5d72176d-kube-api-access-q7rn9\") pod \"community-operators-jmbh5\" (UID: \"dffca359-3f77-47e0-999e-ec7b5d72176d\") " pod="openshift-marketplace/community-operators-jmbh5" Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.906193 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:04 crc kubenswrapper[4885]: E0130 00:11:04.906694 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:05.406679261 +0000 UTC m=+151.998151009 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:04 crc kubenswrapper[4885]: I0130 00:11:04.972982 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.007072 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:05 crc kubenswrapper[4885]: E0130 00:11:05.007475 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:05.507444311 +0000 UTC m=+152.098916059 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.007545 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:05 crc kubenswrapper[4885]: E0130 00:11:05.007981 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:05.507963135 +0000 UTC m=+152.099434953 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.014532 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.047235 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.112889 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:05 crc kubenswrapper[4885]: E0130 00:11:05.114613 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:05.614595761 +0000 UTC m=+152.206067509 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.146852 4885 patch_prober.go:28] interesting pod/router-default-5444994796-fvdhv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 30 00:11:05 crc kubenswrapper[4885]: [-]has-synced failed: reason withheld Jan 30 00:11:05 crc kubenswrapper[4885]: [+]process-running ok Jan 30 00:11:05 crc kubenswrapper[4885]: healthz check failed Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.146919 4885 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fvdhv" podUID="a7093ca4-c09c-4031-ba6f-e7fc85890480" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.147271 4885 patch_prober.go:28] interesting pod/downloads-7954f5f757-qtxcm container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.147287 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-qtxcm" podUID="0eb0e632-fc50-4845-aa1b-4aab2bb7826b" containerName="download-server" 
probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.148307 4885 patch_prober.go:28] interesting pod/downloads-7954f5f757-qtxcm container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.148381 4885 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-qtxcm" podUID="0eb0e632-fc50-4845-aa1b-4aab2bb7826b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.215210 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:05 crc kubenswrapper[4885]: E0130 00:11:05.215552 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:05.715540745 +0000 UTC m=+152.307012493 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.275618 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-v5294"] Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.316099 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.316561 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:05 crc kubenswrapper[4885]: E0130 00:11:05.317006 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:05.816988113 +0000 UTC m=+152.408459861 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.332155 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wn7lm" Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.337910 4885 patch_prober.go:28] interesting pod/apiserver-76f77b778f-xw5nc container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Jan 30 00:11:05 crc kubenswrapper[4885]: [+]log ok Jan 30 00:11:05 crc kubenswrapper[4885]: [+]etcd ok Jan 30 00:11:05 crc kubenswrapper[4885]: [+]poststarthook/start-apiserver-admission-initializer ok Jan 30 00:11:05 crc kubenswrapper[4885]: [+]poststarthook/generic-apiserver-start-informers ok Jan 30 00:11:05 crc kubenswrapper[4885]: [+]poststarthook/max-in-flight-filter ok Jan 30 00:11:05 crc kubenswrapper[4885]: [+]poststarthook/storage-object-count-tracker-hook ok Jan 30 00:11:05 crc kubenswrapper[4885]: [+]poststarthook/image.openshift.io-apiserver-caches ok Jan 30 00:11:05 crc kubenswrapper[4885]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Jan 30 00:11:05 crc kubenswrapper[4885]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Jan 30 00:11:05 crc kubenswrapper[4885]: [+]poststarthook/project.openshift.io-projectcache ok Jan 30 00:11:05 crc kubenswrapper[4885]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Jan 30 00:11:05 crc kubenswrapper[4885]: [+]poststarthook/openshift.io-startinformers ok Jan 30 00:11:05 crc kubenswrapper[4885]: [+]poststarthook/openshift.io-restmapperupdater ok Jan 30 00:11:05 crc kubenswrapper[4885]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Jan 30 00:11:05 crc kubenswrapper[4885]: livez check failed Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.338011 4885 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" podUID="fcde4e44-9ff6-4539-84f3-a016080e13ce" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.381161 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"47fcefc41072d0cd33785b6942be6b66b4998b0537c800e0cc136c6350f67e51"} Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.397027 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2fa78fe3-5bb2-4645-a1e3-194d7a060741","Type":"ContainerStarted","Data":"bd1e74da72367b2ddfd22f5cabc26d2f863879b48ba54a49b2903e7895b5f661"} Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.418952 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.420070 4885 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openshift-marketplace/community-operators-7fh97" secret="" err="failed to sync secret cache: timed out waiting for the condition" Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.420145 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7fh97" Jan 30 00:11:05 crc kubenswrapper[4885]: E0130 00:11:05.445120 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:05.945093925 +0000 UTC m=+152.536565673 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.446210 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.448261 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jmbh5" Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.466209 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" event={"ID":"94a6782c-15c8-43c6-a4f2-6c297ba52df0","Type":"ContainerStarted","Data":"70fb439db2b9d8934e514630375fd10b998aa3804300af52545f18e56b35c6ca"} Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.491147 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29495520-k6gh6" Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.502195 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29495520-k6gh6" event={"ID":"4d551409-85f7-4c8f-8144-64ac0bb1f155","Type":"ContainerDied","Data":"bcb7788121ce444bf23cda77c83213d3b870543b59533c4e3cd2bd77171e1a69"} Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.502261 4885 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bcb7788121ce444bf23cda77c83213d3b870543b59533c4e3cd2bd77171e1a69" Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.520829 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:05 crc kubenswrapper[4885]: E0130 00:11:05.521080 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:06.021047539 +0000 UTC m=+152.612519277 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.521475 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:05 crc kubenswrapper[4885]: E0130 00:11:05.521987 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:06.021972424 +0000 UTC m=+152.613444172 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.623237 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:05 crc kubenswrapper[4885]: E0130 00:11:05.624632 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:06.124611314 +0000 UTC m=+152.716083062 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.698815 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bw6zc"] Jan 30 00:11:05 crc kubenswrapper[4885]: W0130 00:11:05.701608 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9af08248_ac20_4708_8753_bd2d97ad46a6.slice/crio-c9a58a5c56d508e715dc745c1120116951bb43b61342ea2c3b9af9f5e752c799 WatchSource:0}: Error finding container c9a58a5c56d508e715dc745c1120116951bb43b61342ea2c3b9af9f5e752c799: Status 404 returned error can't find the container with id c9a58a5c56d508e715dc745c1120116951bb43b61342ea2c3b9af9f5e752c799 Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.728760 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:05 crc kubenswrapper[4885]: E0130 00:11:05.729142 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:06.229129803 +0000 UTC m=+152.820601551 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.816997 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.817903 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.839748 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:05 crc kubenswrapper[4885]: E0130 00:11:05.840242 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:06.340214159 +0000 UTC m=+152.931685907 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.863986 4885 patch_prober.go:28] interesting pod/console-f9d7485db-2bgb9 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.864082 4885 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-2bgb9" podUID="c58668af-88e5-4058-9571-5ce0f3fd7e9f" containerName="console" probeResult="failure" output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.897710 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.944872 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8r5gv" Jan 30 00:11:05 crc kubenswrapper[4885]: I0130 00:11:05.945316 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:05 crc kubenswrapper[4885]: E0130 00:11:05.947356 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:06.447337769 +0000 UTC m=+153.038809517 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.028361 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-clmbp" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.046277 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.047511 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-fmh4j" Jan 30 00:11:06 crc kubenswrapper[4885]: E0130 00:11:06.050190 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:06.550171964 +0000 UTC m=+153.141643712 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.129652 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mfdx7"] Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.131242 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mfdx7" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.137739 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.169726 4885 patch_prober.go:28] interesting pod/router-default-5444994796-fvdhv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 30 00:11:06 crc kubenswrapper[4885]: [-]has-synced failed: reason withheld Jan 30 00:11:06 crc kubenswrapper[4885]: [+]process-running ok Jan 30 00:11:06 crc kubenswrapper[4885]: healthz check failed Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.169810 4885 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fvdhv" podUID="a7093ca4-c09c-4031-ba6f-e7fc85890480" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.174890 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:06 crc kubenswrapper[4885]: E0130 00:11:06.175264 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:06.675247434 +0000 UTC m=+153.266719172 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.180719 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-fvdhv" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.180756 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mfdx7"] Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.180820 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-jjb74" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.276615 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.276914 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14fd8cd4-0faa-45da-a532-9528073cfe8e-utilities\") pod \"redhat-marketplace-mfdx7\" (UID: \"14fd8cd4-0faa-45da-a532-9528073cfe8e\") " pod="openshift-marketplace/redhat-marketplace-mfdx7" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.276969 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lr8bd\" (UniqueName: \"kubernetes.io/projected/14fd8cd4-0faa-45da-a532-9528073cfe8e-kube-api-access-lr8bd\") pod \"redhat-marketplace-mfdx7\" (UID: \"14fd8cd4-0faa-45da-a532-9528073cfe8e\") " pod="openshift-marketplace/redhat-marketplace-mfdx7" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.277021 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14fd8cd4-0faa-45da-a532-9528073cfe8e-catalog-content\") pod \"redhat-marketplace-mfdx7\" (UID: \"14fd8cd4-0faa-45da-a532-9528073cfe8e\") " pod="openshift-marketplace/redhat-marketplace-mfdx7" Jan 30 00:11:06 crc kubenswrapper[4885]: E0130 00:11:06.278347 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:06.778324126 +0000 UTC m=+153.369795874 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.331585 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7fh97"] Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.381991 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14fd8cd4-0faa-45da-a532-9528073cfe8e-utilities\") pod \"redhat-marketplace-mfdx7\" (UID: \"14fd8cd4-0faa-45da-a532-9528073cfe8e\") " pod="openshift-marketplace/redhat-marketplace-mfdx7" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.382052 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lr8bd\" (UniqueName: \"kubernetes.io/projected/14fd8cd4-0faa-45da-a532-9528073cfe8e-kube-api-access-lr8bd\") pod \"redhat-marketplace-mfdx7\" (UID: \"14fd8cd4-0faa-45da-a532-9528073cfe8e\") " pod="openshift-marketplace/redhat-marketplace-mfdx7" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.382085 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14fd8cd4-0faa-45da-a532-9528073cfe8e-catalog-content\") pod \"redhat-marketplace-mfdx7\" (UID: \"14fd8cd4-0faa-45da-a532-9528073cfe8e\") " pod="openshift-marketplace/redhat-marketplace-mfdx7" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.382121 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:06 crc kubenswrapper[4885]: E0130 00:11:06.382480 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:06.882464935 +0000 UTC m=+153.473936683 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.383045 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14fd8cd4-0faa-45da-a532-9528073cfe8e-utilities\") pod \"redhat-marketplace-mfdx7\" (UID: \"14fd8cd4-0faa-45da-a532-9528073cfe8e\") " pod="openshift-marketplace/redhat-marketplace-mfdx7" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.384264 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14fd8cd4-0faa-45da-a532-9528073cfe8e-catalog-content\") pod \"redhat-marketplace-mfdx7\" (UID: \"14fd8cd4-0faa-45da-a532-9528073cfe8e\") " pod="openshift-marketplace/redhat-marketplace-mfdx7" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.429640 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lr8bd\" (UniqueName: \"kubernetes.io/projected/14fd8cd4-0faa-45da-a532-9528073cfe8e-kube-api-access-lr8bd\") pod \"redhat-marketplace-mfdx7\" (UID: \"14fd8cd4-0faa-45da-a532-9528073cfe8e\") " pod="openshift-marketplace/redhat-marketplace-mfdx7" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.470534 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-n4nxr"] Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.478236 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n4nxr" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.503395 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mfdx7" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.503527 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:06 crc kubenswrapper[4885]: E0130 00:11:06.504270 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:07.004236508 +0000 UTC m=+153.595708246 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.508129 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n4nxr"] Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.559274 4885 generic.go:334] "Generic (PLEG): container finished" podID="9af08248-ac20-4708-8753-bd2d97ad46a6" containerID="741d5b2dbf0d6c75668ad929340cb444c80cb99e1cc405f7ba299d007db3af8c" exitCode=0 Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.559408 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bw6zc" event={"ID":"9af08248-ac20-4708-8753-bd2d97ad46a6","Type":"ContainerDied","Data":"741d5b2dbf0d6c75668ad929340cb444c80cb99e1cc405f7ba299d007db3af8c"} Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.559454 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bw6zc" event={"ID":"9af08248-ac20-4708-8753-bd2d97ad46a6","Type":"ContainerStarted","Data":"c9a58a5c56d508e715dc745c1120116951bb43b61342ea2c3b9af9f5e752c799"} Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.562464 4885 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.608169 4885 generic.go:334] "Generic (PLEG): container finished" podID="c1ac97f9-b076-40c9-80fc-a2f6111d313b" containerID="fde44b5918e18a3d9b07f724feef05ebfcbcd3c512c474eef165441c0e86021c" exitCode=0 Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.608677 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v5294" event={"ID":"c1ac97f9-b076-40c9-80fc-a2f6111d313b","Type":"ContainerDied","Data":"fde44b5918e18a3d9b07f724feef05ebfcbcd3c512c474eef165441c0e86021c"} Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.608711 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v5294" event={"ID":"c1ac97f9-b076-40c9-80fc-a2f6111d313b","Type":"ContainerStarted","Data":"3b328d643a5da0b6bac284c8fdfb987e1e89821b68d126b0a6734cc2d2825a11"} Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.610466 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab0a81e5-1af3-4340-a412-b0ee0d506468-catalog-content\") pod \"redhat-marketplace-n4nxr\" (UID: \"ab0a81e5-1af3-4340-a412-b0ee0d506468\") " pod="openshift-marketplace/redhat-marketplace-n4nxr" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.610518 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.610561 4885 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab0a81e5-1af3-4340-a412-b0ee0d506468-utilities\") pod \"redhat-marketplace-n4nxr\" (UID: \"ab0a81e5-1af3-4340-a412-b0ee0d506468\") " pod="openshift-marketplace/redhat-marketplace-n4nxr" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.610611 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jxqt\" (UniqueName: \"kubernetes.io/projected/ab0a81e5-1af3-4340-a412-b0ee0d506468-kube-api-access-4jxqt\") pod \"redhat-marketplace-n4nxr\" (UID: \"ab0a81e5-1af3-4340-a412-b0ee0d506468\") " pod="openshift-marketplace/redhat-marketplace-n4nxr" Jan 30 00:11:06 crc kubenswrapper[4885]: E0130 00:11:06.610989 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:07.110976837 +0000 UTC m=+153.702448585 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.650813 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jmbh5"] Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.682703 4885 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.701322 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" event={"ID":"94a6782c-15c8-43c6-a4f2-6c297ba52df0","Type":"ContainerStarted","Data":"ddecad9e5f700f6caefbe23569cc2a2e301e5e93c1f2fe8af68a6bfb17066ee6"} Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.712808 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.713112 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab0a81e5-1af3-4340-a412-b0ee0d506468-utilities\") pod \"redhat-marketplace-n4nxr\" (UID: \"ab0a81e5-1af3-4340-a412-b0ee0d506468\") " pod="openshift-marketplace/redhat-marketplace-n4nxr" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.713186 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jxqt\" (UniqueName: \"kubernetes.io/projected/ab0a81e5-1af3-4340-a412-b0ee0d506468-kube-api-access-4jxqt\") pod \"redhat-marketplace-n4nxr\" (UID: \"ab0a81e5-1af3-4340-a412-b0ee0d506468\") " pod="openshift-marketplace/redhat-marketplace-n4nxr" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.713219 4885 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab0a81e5-1af3-4340-a412-b0ee0d506468-catalog-content\") pod \"redhat-marketplace-n4nxr\" (UID: \"ab0a81e5-1af3-4340-a412-b0ee0d506468\") " pod="openshift-marketplace/redhat-marketplace-n4nxr" Jan 30 00:11:06 crc kubenswrapper[4885]: E0130 00:11:06.715114 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:07.215090606 +0000 UTC m=+153.806562354 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.727236 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab0a81e5-1af3-4340-a412-b0ee0d506468-catalog-content\") pod \"redhat-marketplace-n4nxr\" (UID: \"ab0a81e5-1af3-4340-a412-b0ee0d506468\") " pod="openshift-marketplace/redhat-marketplace-n4nxr" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.727707 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab0a81e5-1af3-4340-a412-b0ee0d506468-utilities\") pod \"redhat-marketplace-n4nxr\" (UID: \"ab0a81e5-1af3-4340-a412-b0ee0d506468\") " pod="openshift-marketplace/redhat-marketplace-n4nxr" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.735043 4885 generic.go:334] "Generic (PLEG): container finished" podID="2fa78fe3-5bb2-4645-a1e3-194d7a060741" containerID="503d7cfcdd162f8bbc8f5eeacb8b0917a1964db625623d8db338b828cce43306" exitCode=0 Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.736282 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2fa78fe3-5bb2-4645-a1e3-194d7a060741","Type":"ContainerDied","Data":"503d7cfcdd162f8bbc8f5eeacb8b0917a1964db625623d8db338b828cce43306"} Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.767864 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jxqt\" (UniqueName: \"kubernetes.io/projected/ab0a81e5-1af3-4340-a412-b0ee0d506468-kube-api-access-4jxqt\") pod \"redhat-marketplace-n4nxr\" (UID: \"ab0a81e5-1af3-4340-a412-b0ee0d506468\") " pod="openshift-marketplace/redhat-marketplace-n4nxr" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.815085 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:06 crc kubenswrapper[4885]: E0130 00:11:06.815517 4885 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:07.315504036 +0000 UTC m=+153.906975784 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.851561 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n4nxr" Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.916138 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:06 crc kubenswrapper[4885]: E0130 00:11:06.916952 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:07.416920463 +0000 UTC m=+154.008392211 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:06 crc kubenswrapper[4885]: I0130 00:11:06.917021 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:06 crc kubenswrapper[4885]: E0130 00:11:06.918563 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:07.418545027 +0000 UTC m=+154.010016775 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.019023 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:07 crc kubenswrapper[4885]: E0130 00:11:07.019566 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:07.519543453 +0000 UTC m=+154.111015201 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.062598 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gd928"] Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.063855 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gd928" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.066559 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.084905 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gd928"] Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.121690 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:07 crc kubenswrapper[4885]: E0130 00:11:07.122128 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:07.62211518 +0000 UTC m=+154.213586928 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.140238 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mfdx7"] Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.146124 4885 patch_prober.go:28] interesting pod/router-default-5444994796-fvdhv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 30 00:11:07 crc kubenswrapper[4885]: [-]has-synced failed: reason withheld Jan 30 00:11:07 crc kubenswrapper[4885]: [+]process-running ok Jan 30 00:11:07 crc kubenswrapper[4885]: healthz check failed Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.146216 4885 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fvdhv" podUID="a7093ca4-c09c-4031-ba6f-e7fc85890480" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.225553 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.226178 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/faedfaad-6883-471e-9a4e-d15cc6b969d7-catalog-content\") pod \"redhat-operators-gd928\" (UID: \"faedfaad-6883-471e-9a4e-d15cc6b969d7\") " pod="openshift-marketplace/redhat-operators-gd928" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.226283 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/faedfaad-6883-471e-9a4e-d15cc6b969d7-utilities\") pod \"redhat-operators-gd928\" (UID: \"faedfaad-6883-471e-9a4e-d15cc6b969d7\") " pod="openshift-marketplace/redhat-operators-gd928" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.226304 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6xsj\" (UniqueName: \"kubernetes.io/projected/faedfaad-6883-471e-9a4e-d15cc6b969d7-kube-api-access-n6xsj\") pod \"redhat-operators-gd928\" (UID: \"faedfaad-6883-471e-9a4e-d15cc6b969d7\") " pod="openshift-marketplace/redhat-operators-gd928" Jan 30 00:11:07 crc kubenswrapper[4885]: E0130 00:11:07.226409 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-30 00:11:07.726390404 +0000 UTC m=+154.317862142 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.328543 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/faedfaad-6883-471e-9a4e-d15cc6b969d7-utilities\") pod \"redhat-operators-gd928\" (UID: \"faedfaad-6883-471e-9a4e-d15cc6b969d7\") " pod="openshift-marketplace/redhat-operators-gd928" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.328586 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6xsj\" (UniqueName: \"kubernetes.io/projected/faedfaad-6883-471e-9a4e-d15cc6b969d7-kube-api-access-n6xsj\") pod \"redhat-operators-gd928\" (UID: \"faedfaad-6883-471e-9a4e-d15cc6b969d7\") " pod="openshift-marketplace/redhat-operators-gd928" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.328626 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/faedfaad-6883-471e-9a4e-d15cc6b969d7-catalog-content\") pod \"redhat-operators-gd928\" (UID: \"faedfaad-6883-471e-9a4e-d15cc6b969d7\") " pod="openshift-marketplace/redhat-operators-gd928" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.328668 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:07 crc kubenswrapper[4885]: E0130 00:11:07.329059 4885 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-30 00:11:07.829045863 +0000 UTC m=+154.420517611 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dnhsm" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.329681 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/faedfaad-6883-471e-9a4e-d15cc6b969d7-utilities\") pod \"redhat-operators-gd928\" (UID: \"faedfaad-6883-471e-9a4e-d15cc6b969d7\") " pod="openshift-marketplace/redhat-operators-gd928" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.330274 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/faedfaad-6883-471e-9a4e-d15cc6b969d7-catalog-content\") pod \"redhat-operators-gd928\" (UID: \"faedfaad-6883-471e-9a4e-d15cc6b969d7\") " pod="openshift-marketplace/redhat-operators-gd928" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.333288 4885 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-30T00:11:06.682753971Z","Handler":null,"Name":""} Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.336987 4885 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.337021 4885 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.374163 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6xsj\" (UniqueName: \"kubernetes.io/projected/faedfaad-6883-471e-9a4e-d15cc6b969d7-kube-api-access-n6xsj\") pod \"redhat-operators-gd928\" (UID: \"faedfaad-6883-471e-9a4e-d15cc6b969d7\") " pod="openshift-marketplace/redhat-operators-gd928" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.402613 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n4nxr"] Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.429854 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.448516 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.465475 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wng4c"] Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.466799 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wng4c" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.468298 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gd928" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.484338 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wng4c"] Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.532924 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.614662 4885 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.614714 4885 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.647603 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vzbr\" (UniqueName: \"kubernetes.io/projected/7f0d9e5f-67f9-4f87-8546-8e12d68513e9-kube-api-access-9vzbr\") pod \"redhat-operators-wng4c\" (UID: \"7f0d9e5f-67f9-4f87-8546-8e12d68513e9\") " pod="openshift-marketplace/redhat-operators-wng4c" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.647652 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f0d9e5f-67f9-4f87-8546-8e12d68513e9-catalog-content\") pod \"redhat-operators-wng4c\" (UID: \"7f0d9e5f-67f9-4f87-8546-8e12d68513e9\") " pod="openshift-marketplace/redhat-operators-wng4c" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.647745 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f0d9e5f-67f9-4f87-8546-8e12d68513e9-utilities\") pod \"redhat-operators-wng4c\" (UID: \"7f0d9e5f-67f9-4f87-8546-8e12d68513e9\") " pod="openshift-marketplace/redhat-operators-wng4c" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.708183 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-f5g22" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.737047 4885 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dnhsm\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.748701 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vzbr\" (UniqueName: \"kubernetes.io/projected/7f0d9e5f-67f9-4f87-8546-8e12d68513e9-kube-api-access-9vzbr\") pod \"redhat-operators-wng4c\" (UID: \"7f0d9e5f-67f9-4f87-8546-8e12d68513e9\") " pod="openshift-marketplace/redhat-operators-wng4c" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.748792 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f0d9e5f-67f9-4f87-8546-8e12d68513e9-catalog-content\") pod \"redhat-operators-wng4c\" (UID: \"7f0d9e5f-67f9-4f87-8546-8e12d68513e9\") " pod="openshift-marketplace/redhat-operators-wng4c" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.748956 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f0d9e5f-67f9-4f87-8546-8e12d68513e9-utilities\") pod \"redhat-operators-wng4c\" (UID: \"7f0d9e5f-67f9-4f87-8546-8e12d68513e9\") " pod="openshift-marketplace/redhat-operators-wng4c" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.751449 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f0d9e5f-67f9-4f87-8546-8e12d68513e9-catalog-content\") pod \"redhat-operators-wng4c\" (UID: \"7f0d9e5f-67f9-4f87-8546-8e12d68513e9\") " pod="openshift-marketplace/redhat-operators-wng4c" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.752129 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f0d9e5f-67f9-4f87-8546-8e12d68513e9-utilities\") pod \"redhat-operators-wng4c\" (UID: \"7f0d9e5f-67f9-4f87-8546-8e12d68513e9\") " pod="openshift-marketplace/redhat-operators-wng4c" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.805240 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vzbr\" (UniqueName: \"kubernetes.io/projected/7f0d9e5f-67f9-4f87-8546-8e12d68513e9-kube-api-access-9vzbr\") pod \"redhat-operators-wng4c\" (UID: \"7f0d9e5f-67f9-4f87-8546-8e12d68513e9\") " pod="openshift-marketplace/redhat-operators-wng4c" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.820377 4885 generic.go:334] "Generic (PLEG): container finished" podID="14fd8cd4-0faa-45da-a532-9528073cfe8e" containerID="aff87a71de6cae6546a95cc578421de1d8a48dd86f8200b7abab0d6092065487" exitCode=0 Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.820454 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mfdx7" event={"ID":"14fd8cd4-0faa-45da-a532-9528073cfe8e","Type":"ContainerDied","Data":"aff87a71de6cae6546a95cc578421de1d8a48dd86f8200b7abab0d6092065487"} Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.820489 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mfdx7" 
event={"ID":"14fd8cd4-0faa-45da-a532-9528073cfe8e","Type":"ContainerStarted","Data":"5b9f8b2a017bd629c04e3b234667aacbe1250444a1d3a8c02d491c2a31d1b0f2"} Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.859893 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.876596 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" event={"ID":"94a6782c-15c8-43c6-a4f2-6c297ba52df0","Type":"ContainerStarted","Data":"4290f47231fa5ef2d688d204f0653ff642cd50a5d0761be0c04e6e42a7f15237"} Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.885976 4885 generic.go:334] "Generic (PLEG): container finished" podID="fdd3cba5-cf61-40cd-8c88-d289887fbf8a" containerID="eb7c69075c2d455b3ec53366cc3f598a292ec3124794537038084f1a097244fa" exitCode=0 Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.886321 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7fh97" event={"ID":"fdd3cba5-cf61-40cd-8c88-d289887fbf8a","Type":"ContainerDied","Data":"eb7c69075c2d455b3ec53366cc3f598a292ec3124794537038084f1a097244fa"} Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.886461 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7fh97" event={"ID":"fdd3cba5-cf61-40cd-8c88-d289887fbf8a","Type":"ContainerStarted","Data":"7825c062eb2df92c593ab36796bf8355fe0204ec930bd0e30504bd2f40964021"} Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.920333 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-tmtj2" podStartSLOduration=15.920310603 podStartE2EDuration="15.920310603s" podCreationTimestamp="2026-01-30 00:10:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:07.916397288 +0000 UTC m=+154.507869036" watchObservedRunningTime="2026-01-30 00:11:07.920310603 +0000 UTC m=+154.511782351" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.923441 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wng4c" Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.925104 4885 generic.go:334] "Generic (PLEG): container finished" podID="dffca359-3f77-47e0-999e-ec7b5d72176d" containerID="016c6504227cf26925de196719eff21c23226afb0d56ab2fcfe8fbb824bf2bc0" exitCode=0 Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.925200 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jmbh5" event={"ID":"dffca359-3f77-47e0-999e-ec7b5d72176d","Type":"ContainerDied","Data":"016c6504227cf26925de196719eff21c23226afb0d56ab2fcfe8fbb824bf2bc0"} Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.925232 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jmbh5" event={"ID":"dffca359-3f77-47e0-999e-ec7b5d72176d","Type":"ContainerStarted","Data":"74bf72ee91effaba0bf86d1759e19fa17863ca9196e2e2290cdef90a99ea8d15"} Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.933625 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gd928"] Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.945058 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n4nxr" event={"ID":"ab0a81e5-1af3-4340-a412-b0ee0d506468","Type":"ContainerStarted","Data":"17dd55955db1d932df42861b7ee36dfdbbe7f323bae8a5deb40222b54ce7eb90"} Jan 30 00:11:07 crc kubenswrapper[4885]: I0130 00:11:07.945118 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n4nxr" event={"ID":"ab0a81e5-1af3-4340-a412-b0ee0d506468","Type":"ContainerStarted","Data":"0999419e1a187a61c43efb33e8aa34490d9ba965d47fe7cd4ac75a913272ba86"} Jan 30 00:11:07 crc kubenswrapper[4885]: W0130 00:11:07.980689 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfaedfaad_6883_471e_9a4e_d15cc6b969d7.slice/crio-7093931cbcca622e8bc123e559cef3063867a4488c83e548536f8c2bcbdf57e3 WatchSource:0}: Error finding container 7093931cbcca622e8bc123e559cef3063867a4488c83e548536f8c2bcbdf57e3: Status 404 returned error can't find the container with id 7093931cbcca622e8bc123e559cef3063867a4488c83e548536f8c2bcbdf57e3 Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.180292 4885 patch_prober.go:28] interesting pod/router-default-5444994796-fvdhv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 30 00:11:08 crc kubenswrapper[4885]: [-]has-synced failed: reason withheld Jan 30 00:11:08 crc kubenswrapper[4885]: [+]process-running ok Jan 30 00:11:08 crc kubenswrapper[4885]: healthz check failed Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.180359 4885 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fvdhv" podUID="a7093ca4-c09c-4031-ba6f-e7fc85890480" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.197675 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.556921 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.677552 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-dnhsm"] Jan 30 00:11:08 crc kubenswrapper[4885]: W0130 00:11:08.692849 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd3f0f887_4427_41fa_a495_470f6a1da8ae.slice/crio-6fc7c8693bc21d76ef3115df9816e0361f3acd338a6e37cc4ddafe3406cbadf9 WatchSource:0}: Error finding container 6fc7c8693bc21d76ef3115df9816e0361f3acd338a6e37cc4ddafe3406cbadf9: Status 404 returned error can't find the container with id 6fc7c8693bc21d76ef3115df9816e0361f3acd338a6e37cc4ddafe3406cbadf9 Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.696631 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2fa78fe3-5bb2-4645-a1e3-194d7a060741-kube-api-access\") pod \"2fa78fe3-5bb2-4645-a1e3-194d7a060741\" (UID: \"2fa78fe3-5bb2-4645-a1e3-194d7a060741\") " Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.696691 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2fa78fe3-5bb2-4645-a1e3-194d7a060741-kubelet-dir\") pod \"2fa78fe3-5bb2-4645-a1e3-194d7a060741\" (UID: \"2fa78fe3-5bb2-4645-a1e3-194d7a060741\") " Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.697009 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2fa78fe3-5bb2-4645-a1e3-194d7a060741-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "2fa78fe3-5bb2-4645-a1e3-194d7a060741" (UID: "2fa78fe3-5bb2-4645-a1e3-194d7a060741"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.697098 4885 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2fa78fe3-5bb2-4645-a1e3-194d7a060741-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.707445 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fa78fe3-5bb2-4645-a1e3-194d7a060741-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "2fa78fe3-5bb2-4645-a1e3-194d7a060741" (UID: "2fa78fe3-5bb2-4645-a1e3-194d7a060741"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.717466 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 30 00:11:08 crc kubenswrapper[4885]: E0130 00:11:08.718122 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fa78fe3-5bb2-4645-a1e3-194d7a060741" containerName="pruner" Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.718213 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fa78fe3-5bb2-4645-a1e3-194d7a060741" containerName="pruner" Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.718422 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fa78fe3-5bb2-4645-a1e3-194d7a060741" containerName="pruner" Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.719071 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.723097 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.727056 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.731816 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.756595 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wng4c"] Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.798467 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2fa78fe3-5bb2-4645-a1e3-194d7a060741-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.899329 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a914bb8d-3680-41ab-ab7e-21f512dd5630-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"a914bb8d-3680-41ab-ab7e-21f512dd5630\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.899387 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a914bb8d-3680-41ab-ab7e-21f512dd5630-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"a914bb8d-3680-41ab-ab7e-21f512dd5630\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.962491 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2fa78fe3-5bb2-4645-a1e3-194d7a060741","Type":"ContainerDied","Data":"bd1e74da72367b2ddfd22f5cabc26d2f863879b48ba54a49b2903e7895b5f661"} Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.962534 4885 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bd1e74da72367b2ddfd22f5cabc26d2f863879b48ba54a49b2903e7895b5f661" Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.962544 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.977222 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wng4c" event={"ID":"7f0d9e5f-67f9-4f87-8546-8e12d68513e9","Type":"ContainerStarted","Data":"b5ea85e4fea80ed8d1711329022d891ae49e96be0d3373e5c95ed4048a7712ff"} Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.981086 4885 generic.go:334] "Generic (PLEG): container finished" podID="faedfaad-6883-471e-9a4e-d15cc6b969d7" containerID="582cf37bc5c0da7c67ab281f7606bbafc7b03d380b06ccfe5b4172b98f6e47a4" exitCode=0 Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.981156 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gd928" event={"ID":"faedfaad-6883-471e-9a4e-d15cc6b969d7","Type":"ContainerDied","Data":"582cf37bc5c0da7c67ab281f7606bbafc7b03d380b06ccfe5b4172b98f6e47a4"} Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.981182 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gd928" event={"ID":"faedfaad-6883-471e-9a4e-d15cc6b969d7","Type":"ContainerStarted","Data":"7093931cbcca622e8bc123e559cef3063867a4488c83e548536f8c2bcbdf57e3"} Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.989682 4885 generic.go:334] "Generic (PLEG): container finished" podID="ab0a81e5-1af3-4340-a412-b0ee0d506468" containerID="17dd55955db1d932df42861b7ee36dfdbbe7f323bae8a5deb40222b54ce7eb90" exitCode=0 Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.989835 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n4nxr" event={"ID":"ab0a81e5-1af3-4340-a412-b0ee0d506468","Type":"ContainerDied","Data":"17dd55955db1d932df42861b7ee36dfdbbe7f323bae8a5deb40222b54ce7eb90"} Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.997559 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" event={"ID":"d3f0f887-4427-41fa-a495-470f6a1da8ae","Type":"ContainerStarted","Data":"6fc7c8693bc21d76ef3115df9816e0361f3acd338a6e37cc4ddafe3406cbadf9"} Jan 30 00:11:08 crc kubenswrapper[4885]: I0130 00:11:08.998327 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:09 crc kubenswrapper[4885]: I0130 00:11:09.000624 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a914bb8d-3680-41ab-ab7e-21f512dd5630-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"a914bb8d-3680-41ab-ab7e-21f512dd5630\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 00:11:09 crc kubenswrapper[4885]: I0130 00:11:09.000759 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a914bb8d-3680-41ab-ab7e-21f512dd5630-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"a914bb8d-3680-41ab-ab7e-21f512dd5630\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 00:11:09 crc kubenswrapper[4885]: I0130 00:11:09.000919 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a914bb8d-3680-41ab-ab7e-21f512dd5630-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"a914bb8d-3680-41ab-ab7e-21f512dd5630\") " 
pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 00:11:09 crc kubenswrapper[4885]: I0130 00:11:09.036702 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" podStartSLOduration=130.036674729 podStartE2EDuration="2m10.036674729s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:09.031805249 +0000 UTC m=+155.623277007" watchObservedRunningTime="2026-01-30 00:11:09.036674729 +0000 UTC m=+155.628146477" Jan 30 00:11:09 crc kubenswrapper[4885]: I0130 00:11:09.040875 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a914bb8d-3680-41ab-ab7e-21f512dd5630-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"a914bb8d-3680-41ab-ab7e-21f512dd5630\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 00:11:09 crc kubenswrapper[4885]: I0130 00:11:09.065521 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 00:11:09 crc kubenswrapper[4885]: I0130 00:11:09.141071 4885 patch_prober.go:28] interesting pod/router-default-5444994796-fvdhv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 30 00:11:09 crc kubenswrapper[4885]: [-]has-synced failed: reason withheld Jan 30 00:11:09 crc kubenswrapper[4885]: [+]process-running ok Jan 30 00:11:09 crc kubenswrapper[4885]: healthz check failed Jan 30 00:11:09 crc kubenswrapper[4885]: I0130 00:11:09.141161 4885 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fvdhv" podUID="a7093ca4-c09c-4031-ba6f-e7fc85890480" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 00:11:09 crc kubenswrapper[4885]: I0130 00:11:09.689504 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 30 00:11:09 crc kubenswrapper[4885]: W0130 00:11:09.770456 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-poda914bb8d_3680_41ab_ab7e_21f512dd5630.slice/crio-aa1dcd368f4e6aeafe2bae69228fe3155446c9b4de2726acd63b70b6c4183e6c WatchSource:0}: Error finding container aa1dcd368f4e6aeafe2bae69228fe3155446c9b4de2726acd63b70b6c4183e6c: Status 404 returned error can't find the container with id aa1dcd368f4e6aeafe2bae69228fe3155446c9b4de2726acd63b70b6c4183e6c Jan 30 00:11:09 crc kubenswrapper[4885]: I0130 00:11:09.935009 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:11:09 crc kubenswrapper[4885]: I0130 00:11:09.946468 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-xw5nc" Jan 30 00:11:10 crc kubenswrapper[4885]: I0130 00:11:10.034844 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a914bb8d-3680-41ab-ab7e-21f512dd5630","Type":"ContainerStarted","Data":"aa1dcd368f4e6aeafe2bae69228fe3155446c9b4de2726acd63b70b6c4183e6c"} Jan 30 00:11:10 crc kubenswrapper[4885]: I0130 00:11:10.065201 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" event={"ID":"d3f0f887-4427-41fa-a495-470f6a1da8ae","Type":"ContainerStarted","Data":"0e366299c015f6cec7f94bcbce31c19636871c97b921745b53168e4f85c0eda8"} Jan 30 00:11:10 crc kubenswrapper[4885]: I0130 00:11:10.149525 4885 generic.go:334] "Generic (PLEG): container finished" podID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" containerID="45d7aa30895933135f6344e5c44a682b3007fba757c068721b190877a7563140" exitCode=0 Jan 30 00:11:10 crc kubenswrapper[4885]: I0130 00:11:10.153703 4885 patch_prober.go:28] interesting pod/router-default-5444994796-fvdhv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 30 00:11:10 crc kubenswrapper[4885]: [-]has-synced failed: reason withheld Jan 30 00:11:10 crc kubenswrapper[4885]: [+]process-running ok Jan 30 00:11:10 crc kubenswrapper[4885]: healthz check failed Jan 30 00:11:10 crc kubenswrapper[4885]: I0130 00:11:10.153779 4885 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-fvdhv" podUID="a7093ca4-c09c-4031-ba6f-e7fc85890480" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 30 00:11:10 crc kubenswrapper[4885]: I0130 00:11:10.164443 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wng4c" event={"ID":"7f0d9e5f-67f9-4f87-8546-8e12d68513e9","Type":"ContainerDied","Data":"45d7aa30895933135f6344e5c44a682b3007fba757c068721b190877a7563140"} Jan 30 00:11:11 crc kubenswrapper[4885]: I0130 00:11:11.141114 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-fvdhv" Jan 30 00:11:11 crc kubenswrapper[4885]: I0130 00:11:11.150391 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-fvdhv" Jan 30 00:11:11 crc kubenswrapper[4885]: I0130 00:11:11.189359 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a914bb8d-3680-41ab-ab7e-21f512dd5630","Type":"ContainerStarted","Data":"e5767a08da6eba2d5f7e4d7ac051138b54a0ff90dcba392a63c6e21e5ca36665"} Jan 30 00:11:11 crc kubenswrapper[4885]: I0130 00:11:11.245961 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.245935382 podStartE2EDuration="3.245935382s" podCreationTimestamp="2026-01-30 00:11:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:11.245304635 +0000 UTC m=+157.836776383" watchObservedRunningTime="2026-01-30 00:11:11.245935382 +0000 UTC m=+157.837407140" Jan 30 00:11:12 crc kubenswrapper[4885]: I0130 00:11:12.278532 4885 generic.go:334] "Generic (PLEG): container finished" podID="a914bb8d-3680-41ab-ab7e-21f512dd5630" containerID="e5767a08da6eba2d5f7e4d7ac051138b54a0ff90dcba392a63c6e21e5ca36665" exitCode=0 Jan 30 00:11:12 crc kubenswrapper[4885]: I0130 00:11:12.278591 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a914bb8d-3680-41ab-ab7e-21f512dd5630","Type":"ContainerDied","Data":"e5767a08da6eba2d5f7e4d7ac051138b54a0ff90dcba392a63c6e21e5ca36665"} Jan 30 00:11:13 crc kubenswrapper[4885]: I0130 00:11:13.467573 4885 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:11:13 crc kubenswrapper[4885]: I0130 00:11:13.928116 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 00:11:14 crc kubenswrapper[4885]: I0130 00:11:14.049220 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a914bb8d-3680-41ab-ab7e-21f512dd5630-kube-api-access\") pod \"a914bb8d-3680-41ab-ab7e-21f512dd5630\" (UID: \"a914bb8d-3680-41ab-ab7e-21f512dd5630\") " Jan 30 00:11:14 crc kubenswrapper[4885]: I0130 00:11:14.049388 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a914bb8d-3680-41ab-ab7e-21f512dd5630-kubelet-dir\") pod \"a914bb8d-3680-41ab-ab7e-21f512dd5630\" (UID: \"a914bb8d-3680-41ab-ab7e-21f512dd5630\") " Jan 30 00:11:14 crc kubenswrapper[4885]: I0130 00:11:14.049895 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a914bb8d-3680-41ab-ab7e-21f512dd5630-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "a914bb8d-3680-41ab-ab7e-21f512dd5630" (UID: "a914bb8d-3680-41ab-ab7e-21f512dd5630"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:11:14 crc kubenswrapper[4885]: I0130 00:11:14.062802 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a914bb8d-3680-41ab-ab7e-21f512dd5630-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "a914bb8d-3680-41ab-ab7e-21f512dd5630" (UID: "a914bb8d-3680-41ab-ab7e-21f512dd5630"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:11:14 crc kubenswrapper[4885]: I0130 00:11:14.151930 4885 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a914bb8d-3680-41ab-ab7e-21f512dd5630-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:14 crc kubenswrapper[4885]: I0130 00:11:14.152246 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a914bb8d-3680-41ab-ab7e-21f512dd5630-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:14 crc kubenswrapper[4885]: I0130 00:11:14.368035 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a914bb8d-3680-41ab-ab7e-21f512dd5630","Type":"ContainerDied","Data":"aa1dcd368f4e6aeafe2bae69228fe3155446c9b4de2726acd63b70b6c4183e6c"} Jan 30 00:11:14 crc kubenswrapper[4885]: I0130 00:11:14.368452 4885 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aa1dcd368f4e6aeafe2bae69228fe3155446c9b4de2726acd63b70b6c4183e6c" Jan 30 00:11:14 crc kubenswrapper[4885]: I0130 00:11:14.368204 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 30 00:11:15 crc kubenswrapper[4885]: I0130 00:11:15.147008 4885 patch_prober.go:28] interesting pod/downloads-7954f5f757-qtxcm container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Jan 30 00:11:15 crc kubenswrapper[4885]: I0130 00:11:15.147079 4885 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-qtxcm" podUID="0eb0e632-fc50-4845-aa1b-4aab2bb7826b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Jan 30 00:11:15 crc kubenswrapper[4885]: I0130 00:11:15.147155 4885 patch_prober.go:28] interesting pod/downloads-7954f5f757-qtxcm container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Jan 30 00:11:15 crc kubenswrapper[4885]: I0130 00:11:15.147223 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-qtxcm" podUID="0eb0e632-fc50-4845-aa1b-4aab2bb7826b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Jan 30 00:11:15 crc kubenswrapper[4885]: I0130 00:11:15.831061 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:11:15 crc kubenswrapper[4885]: I0130 00:11:15.842785 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-2bgb9" Jan 30 00:11:21 crc kubenswrapper[4885]: I0130 00:11:21.924028 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs\") pod \"network-metrics-daemon-hg2nk\" (UID: \"313f7566-bae9-4b9c-8c30-9e3c7aef8364\") " pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:11:21 crc kubenswrapper[4885]: I0130 00:11:21.931967 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/313f7566-bae9-4b9c-8c30-9e3c7aef8364-metrics-certs\") pod \"network-metrics-daemon-hg2nk\" (UID: \"313f7566-bae9-4b9c-8c30-9e3c7aef8364\") " pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:11:21 crc kubenswrapper[4885]: I0130 00:11:21.998323 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-hg2nk" Jan 30 00:11:22 crc kubenswrapper[4885]: I0130 00:11:22.892596 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2f8ww"] Jan 30 00:11:22 crc kubenswrapper[4885]: I0130 00:11:22.893436 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" podUID="5bd56c34-d51d-4f93-975b-d5c96f11b7f5" containerName="controller-manager" containerID="cri-o://19292bb7922049d4245dba96da233b16ba1b4e3160e29e5ba750c586d41e4d4b" gracePeriod=30 Jan 30 00:11:22 crc kubenswrapper[4885]: I0130 00:11:22.905313 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8"] Jan 30 00:11:22 crc kubenswrapper[4885]: I0130 00:11:22.905573 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" podUID="929bbe65-1902-453c-bebf-4e833b325ab1" containerName="route-controller-manager" containerID="cri-o://c8a5344263a429acaee41f9c27d92523a71645e1606d4d13fc5b599bf391989b" gracePeriod=30 Jan 30 00:11:24 crc kubenswrapper[4885]: I0130 00:11:24.548940 4885 generic.go:334] "Generic (PLEG): container finished" podID="929bbe65-1902-453c-bebf-4e833b325ab1" containerID="c8a5344263a429acaee41f9c27d92523a71645e1606d4d13fc5b599bf391989b" exitCode=0 Jan 30 00:11:24 crc kubenswrapper[4885]: I0130 00:11:24.549056 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" event={"ID":"929bbe65-1902-453c-bebf-4e833b325ab1","Type":"ContainerDied","Data":"c8a5344263a429acaee41f9c27d92523a71645e1606d4d13fc5b599bf391989b"} Jan 30 00:11:24 crc kubenswrapper[4885]: I0130 00:11:24.552457 4885 generic.go:334] "Generic (PLEG): container finished" podID="5bd56c34-d51d-4f93-975b-d5c96f11b7f5" containerID="19292bb7922049d4245dba96da233b16ba1b4e3160e29e5ba750c586d41e4d4b" exitCode=0 Jan 30 00:11:24 crc kubenswrapper[4885]: I0130 00:11:24.552530 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" event={"ID":"5bd56c34-d51d-4f93-975b-d5c96f11b7f5","Type":"ContainerDied","Data":"19292bb7922049d4245dba96da233b16ba1b4e3160e29e5ba750c586d41e4d4b"} Jan 30 00:11:24 crc kubenswrapper[4885]: I0130 00:11:24.999164 4885 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-2f8ww container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Jan 30 00:11:24 crc kubenswrapper[4885]: I0130 00:11:24.999244 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" podUID="5bd56c34-d51d-4f93-975b-d5c96f11b7f5" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Jan 30 00:11:25 crc kubenswrapper[4885]: I0130 00:11:25.031708 4885 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-mhgj8 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 
10.217.0.6:8443: connect: connection refused" start-of-body= Jan 30 00:11:25 crc kubenswrapper[4885]: I0130 00:11:25.031837 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" podUID="929bbe65-1902-453c-bebf-4e833b325ab1" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Jan 30 00:11:25 crc kubenswrapper[4885]: I0130 00:11:25.147764 4885 patch_prober.go:28] interesting pod/downloads-7954f5f757-qtxcm container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Jan 30 00:11:25 crc kubenswrapper[4885]: I0130 00:11:25.147922 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-qtxcm" podUID="0eb0e632-fc50-4845-aa1b-4aab2bb7826b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Jan 30 00:11:25 crc kubenswrapper[4885]: I0130 00:11:25.149915 4885 patch_prober.go:28] interesting pod/downloads-7954f5f757-qtxcm container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Jan 30 00:11:25 crc kubenswrapper[4885]: I0130 00:11:25.150354 4885 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-qtxcm" podUID="0eb0e632-fc50-4845-aa1b-4aab2bb7826b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Jan 30 00:11:25 crc kubenswrapper[4885]: I0130 00:11:25.151139 4885 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-qtxcm" Jan 30 00:11:25 crc kubenswrapper[4885]: I0130 00:11:25.153167 4885 patch_prober.go:28] interesting pod/downloads-7954f5f757-qtxcm container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Jan 30 00:11:25 crc kubenswrapper[4885]: I0130 00:11:25.153245 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-qtxcm" podUID="0eb0e632-fc50-4845-aa1b-4aab2bb7826b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Jan 30 00:11:25 crc kubenswrapper[4885]: I0130 00:11:25.154297 4885 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"a1c6e760a7ec3b83dd530a31fafe50c34b3f40b503dddf8c593801e478f1ff56"} pod="openshift-console/downloads-7954f5f757-qtxcm" containerMessage="Container download-server failed liveness probe, will be restarted" Jan 30 00:11:25 crc kubenswrapper[4885]: I0130 00:11:25.154641 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-qtxcm" podUID="0eb0e632-fc50-4845-aa1b-4aab2bb7826b" containerName="download-server" containerID="cri-o://a1c6e760a7ec3b83dd530a31fafe50c34b3f40b503dddf8c593801e478f1ff56" gracePeriod=2 Jan 30 00:11:27 crc 
kubenswrapper[4885]: I0130 00:11:27.867519 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:11:30 crc kubenswrapper[4885]: I0130 00:11:30.144456 4885 patch_prober.go:28] interesting pod/machine-config-daemon-bmd5j container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 00:11:30 crc kubenswrapper[4885]: I0130 00:11:30.144557 4885 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 00:11:32 crc kubenswrapper[4885]: I0130 00:11:32.603912 4885 generic.go:334] "Generic (PLEG): container finished" podID="c59baa8a-ba27-4ef6-9d63-a0a25b597f7e" containerID="90e2837a2e429ec1f476b964cf9aaf2fc5515b45e35fd03769fd5f61a650cb79" exitCode=0 Jan 30 00:11:32 crc kubenswrapper[4885]: I0130 00:11:32.604043 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29495520-c9vgk" event={"ID":"c59baa8a-ba27-4ef6-9d63-a0a25b597f7e","Type":"ContainerDied","Data":"90e2837a2e429ec1f476b964cf9aaf2fc5515b45e35fd03769fd5f61a650cb79"} Jan 30 00:11:35 crc kubenswrapper[4885]: I0130 00:11:35.032076 4885 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-mhgj8 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Jan 30 00:11:35 crc kubenswrapper[4885]: I0130 00:11:35.032614 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" podUID="929bbe65-1902-453c-bebf-4e833b325ab1" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Jan 30 00:11:35 crc kubenswrapper[4885]: I0130 00:11:35.148338 4885 patch_prober.go:28] interesting pod/downloads-7954f5f757-qtxcm container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Jan 30 00:11:35 crc kubenswrapper[4885]: I0130 00:11:35.148926 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-qtxcm" podUID="0eb0e632-fc50-4845-aa1b-4aab2bb7826b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Jan 30 00:11:35 crc kubenswrapper[4885]: I0130 00:11:35.599844 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vjhs9" Jan 30 00:11:35 crc kubenswrapper[4885]: I0130 00:11:35.633711 4885 generic.go:334] "Generic (PLEG): container finished" podID="0eb0e632-fc50-4845-aa1b-4aab2bb7826b" containerID="a1c6e760a7ec3b83dd530a31fafe50c34b3f40b503dddf8c593801e478f1ff56" exitCode=0 Jan 30 00:11:35 crc kubenswrapper[4885]: I0130 00:11:35.633799 4885 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-qtxcm" event={"ID":"0eb0e632-fc50-4845-aa1b-4aab2bb7826b","Type":"ContainerDied","Data":"a1c6e760a7ec3b83dd530a31fafe50c34b3f40b503dddf8c593801e478f1ff56"} Jan 30 00:11:35 crc kubenswrapper[4885]: I0130 00:11:35.998597 4885 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-2f8ww container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 30 00:11:35 crc kubenswrapper[4885]: I0130 00:11:35.998718 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" podUID="5bd56c34-d51d-4f93-975b-d5c96f11b7f5" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.171675 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.192584 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29495520-c9vgk" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.242997 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.243097 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-64879fc986-8mgtg"] Jan 30 00:11:39 crc kubenswrapper[4885]: E0130 00:11:39.243365 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c59baa8a-ba27-4ef6-9d63-a0a25b597f7e" containerName="image-pruner" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.243380 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="c59baa8a-ba27-4ef6-9d63-a0a25b597f7e" containerName="image-pruner" Jan 30 00:11:39 crc kubenswrapper[4885]: E0130 00:11:39.243396 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="929bbe65-1902-453c-bebf-4e833b325ab1" containerName="route-controller-manager" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.243403 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="929bbe65-1902-453c-bebf-4e833b325ab1" containerName="route-controller-manager" Jan 30 00:11:39 crc kubenswrapper[4885]: E0130 00:11:39.243412 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a914bb8d-3680-41ab-ab7e-21f512dd5630" containerName="pruner" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.243421 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="a914bb8d-3680-41ab-ab7e-21f512dd5630" containerName="pruner" Jan 30 00:11:39 crc kubenswrapper[4885]: E0130 00:11:39.243431 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bd56c34-d51d-4f93-975b-d5c96f11b7f5" containerName="controller-manager" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.243440 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bd56c34-d51d-4f93-975b-d5c96f11b7f5" containerName="controller-manager" Jan 30 00:11:39 crc 
kubenswrapper[4885]: I0130 00:11:39.243541 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="929bbe65-1902-453c-bebf-4e833b325ab1" containerName="route-controller-manager" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.243557 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="c59baa8a-ba27-4ef6-9d63-a0a25b597f7e" containerName="image-pruner" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.243565 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="a914bb8d-3680-41ab-ab7e-21f512dd5630" containerName="pruner" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.243574 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bd56c34-d51d-4f93-975b-d5c96f11b7f5" containerName="controller-manager" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.244187 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.258086 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-64879fc986-8mgtg"] Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.337039 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-client-ca\") pod \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\" (UID: \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\") " Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.337124 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c59baa8a-ba27-4ef6-9d63-a0a25b597f7e-serviceca\") pod \"c59baa8a-ba27-4ef6-9d63-a0a25b597f7e\" (UID: \"c59baa8a-ba27-4ef6-9d63-a0a25b597f7e\") " Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.337172 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-config\") pod \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\" (UID: \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\") " Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.337194 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-proxy-ca-bundles\") pod \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\" (UID: \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\") " Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.337219 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7mw2\" (UniqueName: \"kubernetes.io/projected/929bbe65-1902-453c-bebf-4e833b325ab1-kube-api-access-j7mw2\") pod \"929bbe65-1902-453c-bebf-4e833b325ab1\" (UID: \"929bbe65-1902-453c-bebf-4e833b325ab1\") " Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.337257 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bsz56\" (UniqueName: \"kubernetes.io/projected/c59baa8a-ba27-4ef6-9d63-a0a25b597f7e-kube-api-access-bsz56\") pod \"c59baa8a-ba27-4ef6-9d63-a0a25b597f7e\" (UID: \"c59baa8a-ba27-4ef6-9d63-a0a25b597f7e\") " Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.337289 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/929bbe65-1902-453c-bebf-4e833b325ab1-client-ca\") pod \"929bbe65-1902-453c-bebf-4e833b325ab1\" (UID: \"929bbe65-1902-453c-bebf-4e833b325ab1\") " Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.337312 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/929bbe65-1902-453c-bebf-4e833b325ab1-config\") pod \"929bbe65-1902-453c-bebf-4e833b325ab1\" (UID: \"929bbe65-1902-453c-bebf-4e833b325ab1\") " Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.337335 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hbfdw\" (UniqueName: \"kubernetes.io/projected/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-kube-api-access-hbfdw\") pod \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\" (UID: \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\") " Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.337380 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/929bbe65-1902-453c-bebf-4e833b325ab1-serving-cert\") pod \"929bbe65-1902-453c-bebf-4e833b325ab1\" (UID: \"929bbe65-1902-453c-bebf-4e833b325ab1\") " Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.337432 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-serving-cert\") pod \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\" (UID: \"5bd56c34-d51d-4f93-975b-d5c96f11b7f5\") " Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.337581 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3df54d6-7efc-412d-9b56-ac3806d2c49e-config\") pod \"controller-manager-64879fc986-8mgtg\" (UID: \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\") " pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.337654 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e3df54d6-7efc-412d-9b56-ac3806d2c49e-client-ca\") pod \"controller-manager-64879fc986-8mgtg\" (UID: \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\") " pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.337683 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3df54d6-7efc-412d-9b56-ac3806d2c49e-serving-cert\") pod \"controller-manager-64879fc986-8mgtg\" (UID: \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\") " pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.337711 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e3df54d6-7efc-412d-9b56-ac3806d2c49e-proxy-ca-bundles\") pod \"controller-manager-64879fc986-8mgtg\" (UID: \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\") " pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.337741 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jd4m\" (UniqueName: 
\"kubernetes.io/projected/e3df54d6-7efc-412d-9b56-ac3806d2c49e-kube-api-access-6jd4m\") pod \"controller-manager-64879fc986-8mgtg\" (UID: \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\") " pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.338381 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "5bd56c34-d51d-4f93-975b-d5c96f11b7f5" (UID: "5bd56c34-d51d-4f93-975b-d5c96f11b7f5"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.338416 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-config" (OuterVolumeSpecName: "config") pod "5bd56c34-d51d-4f93-975b-d5c96f11b7f5" (UID: "5bd56c34-d51d-4f93-975b-d5c96f11b7f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.338459 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/929bbe65-1902-453c-bebf-4e833b325ab1-config" (OuterVolumeSpecName: "config") pod "929bbe65-1902-453c-bebf-4e833b325ab1" (UID: "929bbe65-1902-453c-bebf-4e833b325ab1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.338924 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-client-ca" (OuterVolumeSpecName: "client-ca") pod "5bd56c34-d51d-4f93-975b-d5c96f11b7f5" (UID: "5bd56c34-d51d-4f93-975b-d5c96f11b7f5"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.339031 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c59baa8a-ba27-4ef6-9d63-a0a25b597f7e-serviceca" (OuterVolumeSpecName: "serviceca") pod "c59baa8a-ba27-4ef6-9d63-a0a25b597f7e" (UID: "c59baa8a-ba27-4ef6-9d63-a0a25b597f7e"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.339270 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/929bbe65-1902-453c-bebf-4e833b325ab1-client-ca" (OuterVolumeSpecName: "client-ca") pod "929bbe65-1902-453c-bebf-4e833b325ab1" (UID: "929bbe65-1902-453c-bebf-4e833b325ab1"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.347856 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5bd56c34-d51d-4f93-975b-d5c96f11b7f5" (UID: "5bd56c34-d51d-4f93-975b-d5c96f11b7f5"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.349936 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/929bbe65-1902-453c-bebf-4e833b325ab1-kube-api-access-j7mw2" (OuterVolumeSpecName: "kube-api-access-j7mw2") pod "929bbe65-1902-453c-bebf-4e833b325ab1" (UID: "929bbe65-1902-453c-bebf-4e833b325ab1"). InnerVolumeSpecName "kube-api-access-j7mw2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.353005 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c59baa8a-ba27-4ef6-9d63-a0a25b597f7e-kube-api-access-bsz56" (OuterVolumeSpecName: "kube-api-access-bsz56") pod "c59baa8a-ba27-4ef6-9d63-a0a25b597f7e" (UID: "c59baa8a-ba27-4ef6-9d63-a0a25b597f7e"). InnerVolumeSpecName "kube-api-access-bsz56". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.355957 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/929bbe65-1902-453c-bebf-4e833b325ab1-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "929bbe65-1902-453c-bebf-4e833b325ab1" (UID: "929bbe65-1902-453c-bebf-4e833b325ab1"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.356395 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-kube-api-access-hbfdw" (OuterVolumeSpecName: "kube-api-access-hbfdw") pod "5bd56c34-d51d-4f93-975b-d5c96f11b7f5" (UID: "5bd56c34-d51d-4f93-975b-d5c96f11b7f5"). InnerVolumeSpecName "kube-api-access-hbfdw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.439173 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e3df54d6-7efc-412d-9b56-ac3806d2c49e-client-ca\") pod \"controller-manager-64879fc986-8mgtg\" (UID: \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\") " pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.439226 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3df54d6-7efc-412d-9b56-ac3806d2c49e-serving-cert\") pod \"controller-manager-64879fc986-8mgtg\" (UID: \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\") " pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.439253 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e3df54d6-7efc-412d-9b56-ac3806d2c49e-proxy-ca-bundles\") pod \"controller-manager-64879fc986-8mgtg\" (UID: \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\") " pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.439286 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jd4m\" (UniqueName: \"kubernetes.io/projected/e3df54d6-7efc-412d-9b56-ac3806d2c49e-kube-api-access-6jd4m\") pod \"controller-manager-64879fc986-8mgtg\" (UID: \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\") " pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.439312 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3df54d6-7efc-412d-9b56-ac3806d2c49e-config\") pod \"controller-manager-64879fc986-8mgtg\" (UID: \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\") " pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.439369 4885 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c59baa8a-ba27-4ef6-9d63-a0a25b597f7e-serviceca\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.439380 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.439390 4885 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.439401 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7mw2\" (UniqueName: \"kubernetes.io/projected/929bbe65-1902-453c-bebf-4e833b325ab1-kube-api-access-j7mw2\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.439409 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bsz56\" (UniqueName: \"kubernetes.io/projected/c59baa8a-ba27-4ef6-9d63-a0a25b597f7e-kube-api-access-bsz56\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 
00:11:39.439419 4885 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/929bbe65-1902-453c-bebf-4e833b325ab1-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.439427 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/929bbe65-1902-453c-bebf-4e833b325ab1-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.439437 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hbfdw\" (UniqueName: \"kubernetes.io/projected/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-kube-api-access-hbfdw\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.439447 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/929bbe65-1902-453c-bebf-4e833b325ab1-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.439458 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.439468 4885 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5bd56c34-d51d-4f93-975b-d5c96f11b7f5-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.440608 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e3df54d6-7efc-412d-9b56-ac3806d2c49e-client-ca\") pod \"controller-manager-64879fc986-8mgtg\" (UID: \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\") " pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.440839 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3df54d6-7efc-412d-9b56-ac3806d2c49e-config\") pod \"controller-manager-64879fc986-8mgtg\" (UID: \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\") " pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.441671 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e3df54d6-7efc-412d-9b56-ac3806d2c49e-proxy-ca-bundles\") pod \"controller-manager-64879fc986-8mgtg\" (UID: \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\") " pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.447272 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3df54d6-7efc-412d-9b56-ac3806d2c49e-serving-cert\") pod \"controller-manager-64879fc986-8mgtg\" (UID: \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\") " pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.457722 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jd4m\" (UniqueName: \"kubernetes.io/projected/e3df54d6-7efc-412d-9b56-ac3806d2c49e-kube-api-access-6jd4m\") pod \"controller-manager-64879fc986-8mgtg\" (UID: \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\") " 
pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.564256 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.654806 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" event={"ID":"5bd56c34-d51d-4f93-975b-d5c96f11b7f5","Type":"ContainerDied","Data":"63cfdbcec84f8f6d931a34e0ddf652a9e78e266a729b1603d83def89c53527d3"} Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.654829 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2f8ww" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.654874 4885 scope.go:117] "RemoveContainer" containerID="19292bb7922049d4245dba96da233b16ba1b4e3160e29e5ba750c586d41e4d4b" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.657516 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.657820 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8" event={"ID":"929bbe65-1902-453c-bebf-4e833b325ab1","Type":"ContainerDied","Data":"e4d4537ab91da298d4352b122eb8b678311dc7e51faff49a6ae56089b5f27e84"} Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.660940 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29495520-c9vgk" event={"ID":"c59baa8a-ba27-4ef6-9d63-a0a25b597f7e","Type":"ContainerDied","Data":"43b4a4ac1de95958d0cbf74cc941926ee82f3928958882a5d089446522eb360a"} Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.661016 4885 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="43b4a4ac1de95958d0cbf74cc941926ee82f3928958882a5d089446522eb360a" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.661066 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-pruner-29495520-c9vgk" Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.684857 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2f8ww"] Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.701076 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2f8ww"] Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.708542 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8"] Jan 30 00:11:39 crc kubenswrapper[4885]: I0130 00:11:39.713212 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-mhgj8"] Jan 30 00:11:40 crc kubenswrapper[4885]: I0130 00:11:40.149117 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5bd56c34-d51d-4f93-975b-d5c96f11b7f5" path="/var/lib/kubelet/pods/5bd56c34-d51d-4f93-975b-d5c96f11b7f5/volumes" Jan 30 00:11:40 crc kubenswrapper[4885]: I0130 00:11:40.150000 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="929bbe65-1902-453c-bebf-4e833b325ab1" path="/var/lib/kubelet/pods/929bbe65-1902-453c-bebf-4e833b325ab1/volumes" Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.519734 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl"] Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.521084 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.525214 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.525315 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.525439 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.525552 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.525706 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.526034 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.554484 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl"] Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.668596 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b0166258-aa22-4b69-89fa-2b617b7e5f2c-serving-cert\") pod \"route-controller-manager-dddc87f8-254pl\" (UID: \"b0166258-aa22-4b69-89fa-2b617b7e5f2c\") " 
pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.668825 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9x4kd\" (UniqueName: \"kubernetes.io/projected/b0166258-aa22-4b69-89fa-2b617b7e5f2c-kube-api-access-9x4kd\") pod \"route-controller-manager-dddc87f8-254pl\" (UID: \"b0166258-aa22-4b69-89fa-2b617b7e5f2c\") " pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.668953 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b0166258-aa22-4b69-89fa-2b617b7e5f2c-client-ca\") pod \"route-controller-manager-dddc87f8-254pl\" (UID: \"b0166258-aa22-4b69-89fa-2b617b7e5f2c\") " pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.669069 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0166258-aa22-4b69-89fa-2b617b7e5f2c-config\") pod \"route-controller-manager-dddc87f8-254pl\" (UID: \"b0166258-aa22-4b69-89fa-2b617b7e5f2c\") " pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.770105 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0166258-aa22-4b69-89fa-2b617b7e5f2c-config\") pod \"route-controller-manager-dddc87f8-254pl\" (UID: \"b0166258-aa22-4b69-89fa-2b617b7e5f2c\") " pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.770183 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b0166258-aa22-4b69-89fa-2b617b7e5f2c-serving-cert\") pod \"route-controller-manager-dddc87f8-254pl\" (UID: \"b0166258-aa22-4b69-89fa-2b617b7e5f2c\") " pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.770242 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9x4kd\" (UniqueName: \"kubernetes.io/projected/b0166258-aa22-4b69-89fa-2b617b7e5f2c-kube-api-access-9x4kd\") pod \"route-controller-manager-dddc87f8-254pl\" (UID: \"b0166258-aa22-4b69-89fa-2b617b7e5f2c\") " pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.770304 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b0166258-aa22-4b69-89fa-2b617b7e5f2c-client-ca\") pod \"route-controller-manager-dddc87f8-254pl\" (UID: \"b0166258-aa22-4b69-89fa-2b617b7e5f2c\") " pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.771742 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b0166258-aa22-4b69-89fa-2b617b7e5f2c-client-ca\") pod \"route-controller-manager-dddc87f8-254pl\" (UID: \"b0166258-aa22-4b69-89fa-2b617b7e5f2c\") " 
pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.776024 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0166258-aa22-4b69-89fa-2b617b7e5f2c-config\") pod \"route-controller-manager-dddc87f8-254pl\" (UID: \"b0166258-aa22-4b69-89fa-2b617b7e5f2c\") " pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.787039 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9x4kd\" (UniqueName: \"kubernetes.io/projected/b0166258-aa22-4b69-89fa-2b617b7e5f2c-kube-api-access-9x4kd\") pod \"route-controller-manager-dddc87f8-254pl\" (UID: \"b0166258-aa22-4b69-89fa-2b617b7e5f2c\") " pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.792388 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b0166258-aa22-4b69-89fa-2b617b7e5f2c-serving-cert\") pod \"route-controller-manager-dddc87f8-254pl\" (UID: \"b0166258-aa22-4b69-89fa-2b617b7e5f2c\") " pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.851803 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" Jan 30 00:11:41 crc kubenswrapper[4885]: I0130 00:11:41.915345 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 30 00:11:42 crc kubenswrapper[4885]: I0130 00:11:42.863115 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-64879fc986-8mgtg"] Jan 30 00:11:42 crc kubenswrapper[4885]: I0130 00:11:42.962827 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl"] Jan 30 00:11:43 crc kubenswrapper[4885]: I0130 00:11:43.313736 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 30 00:11:43 crc kubenswrapper[4885]: I0130 00:11:43.326758 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 00:11:43 crc kubenswrapper[4885]: I0130 00:11:43.329829 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 30 00:11:43 crc kubenswrapper[4885]: I0130 00:11:43.330436 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 30 00:11:43 crc kubenswrapper[4885]: I0130 00:11:43.377389 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 30 00:11:43 crc kubenswrapper[4885]: I0130 00:11:43.501325 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0bb2ca57-12f0-4653-b7fe-dc1935925eec-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"0bb2ca57-12f0-4653-b7fe-dc1935925eec\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 00:11:43 crc kubenswrapper[4885]: I0130 00:11:43.501427 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0bb2ca57-12f0-4653-b7fe-dc1935925eec-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"0bb2ca57-12f0-4653-b7fe-dc1935925eec\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 00:11:43 crc kubenswrapper[4885]: I0130 00:11:43.603280 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0bb2ca57-12f0-4653-b7fe-dc1935925eec-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"0bb2ca57-12f0-4653-b7fe-dc1935925eec\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 00:11:43 crc kubenswrapper[4885]: I0130 00:11:43.603359 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0bb2ca57-12f0-4653-b7fe-dc1935925eec-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"0bb2ca57-12f0-4653-b7fe-dc1935925eec\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 00:11:43 crc kubenswrapper[4885]: I0130 00:11:43.603491 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0bb2ca57-12f0-4653-b7fe-dc1935925eec-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"0bb2ca57-12f0-4653-b7fe-dc1935925eec\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 00:11:43 crc kubenswrapper[4885]: I0130 00:11:43.627807 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0bb2ca57-12f0-4653-b7fe-dc1935925eec-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"0bb2ca57-12f0-4653-b7fe-dc1935925eec\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 00:11:43 crc kubenswrapper[4885]: I0130 00:11:43.687837 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 00:11:44 crc kubenswrapper[4885]: E0130 00:11:44.656873 4885 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 30 00:11:44 crc kubenswrapper[4885]: E0130 00:11:44.657536 4885 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-q7rn9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-jmbh5_openshift-marketplace(dffca359-3f77-47e0-999e-ec7b5d72176d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 30 00:11:44 crc kubenswrapper[4885]: E0130 00:11:44.658812 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-jmbh5" podUID="dffca359-3f77-47e0-999e-ec7b5d72176d" Jan 30 00:11:44 crc kubenswrapper[4885]: E0130 00:11:44.700718 4885 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 30 00:11:44 crc kubenswrapper[4885]: E0130 00:11:44.701214 4885 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mkn5v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-7fh97_openshift-marketplace(fdd3cba5-cf61-40cd-8c88-d289887fbf8a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 30 00:11:44 crc kubenswrapper[4885]: E0130 00:11:44.702524 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-7fh97" podUID="fdd3cba5-cf61-40cd-8c88-d289887fbf8a" Jan 30 00:11:45 crc kubenswrapper[4885]: I0130 00:11:45.147090 4885 patch_prober.go:28] interesting pod/downloads-7954f5f757-qtxcm container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Jan 30 00:11:45 crc kubenswrapper[4885]: I0130 00:11:45.147156 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-qtxcm" podUID="0eb0e632-fc50-4845-aa1b-4aab2bb7826b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Jan 30 00:11:45 crc kubenswrapper[4885]: E0130 00:11:45.847664 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-7fh97" podUID="fdd3cba5-cf61-40cd-8c88-d289887fbf8a" Jan 30 00:11:45 crc kubenswrapper[4885]: E0130 00:11:45.848411 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-jmbh5" podUID="dffca359-3f77-47e0-999e-ec7b5d72176d" Jan 30 00:11:45 crc kubenswrapper[4885]: E0130 00:11:45.929262 4885 log.go:32] "PullImage from 
image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 30 00:11:45 crc kubenswrapper[4885]: E0130 00:11:45.929437 4885 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4jxqt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-n4nxr_openshift-marketplace(ab0a81e5-1af3-4340-a412-b0ee0d506468): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 30 00:11:45 crc kubenswrapper[4885]: E0130 00:11:45.930634 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-n4nxr" podUID="ab0a81e5-1af3-4340-a412-b0ee0d506468" Jan 30 00:11:47 crc kubenswrapper[4885]: E0130 00:11:47.265860 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-n4nxr" podUID="ab0a81e5-1af3-4340-a412-b0ee0d506468" Jan 30 00:11:47 crc kubenswrapper[4885]: I0130 00:11:47.318597 4885 scope.go:117] "RemoveContainer" containerID="c8a5344263a429acaee41f9c27d92523a71645e1606d4d13fc5b599bf391989b" Jan 30 00:11:47 crc kubenswrapper[4885]: E0130 00:11:47.332750 4885 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 30 00:11:47 crc kubenswrapper[4885]: E0130 00:11:47.332940 4885 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ktsg6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-bw6zc_openshift-marketplace(9af08248-ac20-4708-8753-bd2d97ad46a6): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 30 00:11:47 crc kubenswrapper[4885]: E0130 00:11:47.334141 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-bw6zc" podUID="9af08248-ac20-4708-8753-bd2d97ad46a6" Jan 30 00:11:47 crc kubenswrapper[4885]: E0130 00:11:47.346304 4885 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 30 00:11:47 crc kubenswrapper[4885]: E0130 00:11:47.348574 4885 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lr8bd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-mfdx7_openshift-marketplace(14fd8cd4-0faa-45da-a532-9528073cfe8e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 30 00:11:47 crc kubenswrapper[4885]: E0130 00:11:47.350625 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-mfdx7" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" Jan 30 00:11:47 crc kubenswrapper[4885]: E0130 00:11:47.420155 4885 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 30 00:11:47 crc kubenswrapper[4885]: E0130 00:11:47.420555 4885 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9vzbr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-wng4c_openshift-marketplace(7f0d9e5f-67f9-4f87-8546-8e12d68513e9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 30 00:11:47 crc kubenswrapper[4885]: E0130 00:11:47.422557 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-wng4c" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" Jan 30 00:11:47 crc kubenswrapper[4885]: E0130 00:11:47.452994 4885 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 30 00:11:47 crc kubenswrapper[4885]: E0130 00:11:47.453254 4885 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nz8hz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-v5294_openshift-marketplace(c1ac97f9-b076-40c9-80fc-a2f6111d313b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 30 00:11:47 crc kubenswrapper[4885]: E0130 00:11:47.454796 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-v5294" podUID="c1ac97f9-b076-40c9-80fc-a2f6111d313b" Jan 30 00:11:47 crc kubenswrapper[4885]: E0130 00:11:47.715880 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-v5294" podUID="c1ac97f9-b076-40c9-80fc-a2f6111d313b" Jan 30 00:11:47 crc kubenswrapper[4885]: E0130 00:11:47.717139 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-mfdx7" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" Jan 30 00:11:47 crc kubenswrapper[4885]: E0130 00:11:47.717271 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-bw6zc" podUID="9af08248-ac20-4708-8753-bd2d97ad46a6" Jan 30 00:11:47 crc kubenswrapper[4885]: E0130 00:11:47.719042 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-wng4c" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" Jan 30 00:11:47 
crc kubenswrapper[4885]: I0130 00:11:47.808505 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 30 00:11:47 crc kubenswrapper[4885]: I0130 00:11:47.815687 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-hg2nk"] Jan 30 00:11:47 crc kubenswrapper[4885]: W0130 00:11:47.830809 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod313f7566_bae9_4b9c_8c30_9e3c7aef8364.slice/crio-40a007348c9167ec626a11d759af1ae2be8df9aeaf6f4c5ef16665a3c38d05e9 WatchSource:0}: Error finding container 40a007348c9167ec626a11d759af1ae2be8df9aeaf6f4c5ef16665a3c38d05e9: Status 404 returned error can't find the container with id 40a007348c9167ec626a11d759af1ae2be8df9aeaf6f4c5ef16665a3c38d05e9 Jan 30 00:11:47 crc kubenswrapper[4885]: I0130 00:11:47.868659 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl"] Jan 30 00:11:47 crc kubenswrapper[4885]: I0130 00:11:47.877124 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-64879fc986-8mgtg"] Jan 30 00:11:47 crc kubenswrapper[4885]: W0130 00:11:47.884387 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb0166258_aa22_4b69_89fa_2b617b7e5f2c.slice/crio-c3bbfb12ac0e7ef16ee3ac9d0e252951b675f0e2ce2b2c5b4eb789f806007500 WatchSource:0}: Error finding container c3bbfb12ac0e7ef16ee3ac9d0e252951b675f0e2ce2b2c5b4eb789f806007500: Status 404 returned error can't find the container with id c3bbfb12ac0e7ef16ee3ac9d0e252951b675f0e2ce2b2c5b4eb789f806007500 Jan 30 00:11:47 crc kubenswrapper[4885]: W0130 00:11:47.890392 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode3df54d6_7efc_412d_9b56_ac3806d2c49e.slice/crio-397af4bfdf720e576f7209efeacaa4f0819ff2fc0b77bfd25727c27fe34fc13f WatchSource:0}: Error finding container 397af4bfdf720e576f7209efeacaa4f0819ff2fc0b77bfd25727c27fe34fc13f: Status 404 returned error can't find the container with id 397af4bfdf720e576f7209efeacaa4f0819ff2fc0b77bfd25727c27fe34fc13f Jan 30 00:11:47 crc kubenswrapper[4885]: I0130 00:11:47.915759 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 30 00:11:47 crc kubenswrapper[4885]: I0130 00:11:47.916931 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 30 00:11:47 crc kubenswrapper[4885]: I0130 00:11:47.920628 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.069913 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/d94abf5b-36a7-4e56-9f8f-1bbc4f729676-var-lock\") pod \"installer-9-crc\" (UID: \"d94abf5b-36a7-4e56-9f8f-1bbc4f729676\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.069966 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d94abf5b-36a7-4e56-9f8f-1bbc4f729676-kubelet-dir\") pod \"installer-9-crc\" (UID: \"d94abf5b-36a7-4e56-9f8f-1bbc4f729676\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.070595 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d94abf5b-36a7-4e56-9f8f-1bbc4f729676-kube-api-access\") pod \"installer-9-crc\" (UID: \"d94abf5b-36a7-4e56-9f8f-1bbc4f729676\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.171921 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d94abf5b-36a7-4e56-9f8f-1bbc4f729676-kube-api-access\") pod \"installer-9-crc\" (UID: \"d94abf5b-36a7-4e56-9f8f-1bbc4f729676\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.172362 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/d94abf5b-36a7-4e56-9f8f-1bbc4f729676-var-lock\") pod \"installer-9-crc\" (UID: \"d94abf5b-36a7-4e56-9f8f-1bbc4f729676\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.172393 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d94abf5b-36a7-4e56-9f8f-1bbc4f729676-kubelet-dir\") pod \"installer-9-crc\" (UID: \"d94abf5b-36a7-4e56-9f8f-1bbc4f729676\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.172432 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/d94abf5b-36a7-4e56-9f8f-1bbc4f729676-var-lock\") pod \"installer-9-crc\" (UID: \"d94abf5b-36a7-4e56-9f8f-1bbc4f729676\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.172491 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d94abf5b-36a7-4e56-9f8f-1bbc4f729676-kubelet-dir\") pod \"installer-9-crc\" (UID: \"d94abf5b-36a7-4e56-9f8f-1bbc4f729676\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.192390 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d94abf5b-36a7-4e56-9f8f-1bbc4f729676-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"d94abf5b-36a7-4e56-9f8f-1bbc4f729676\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.232797 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.659310 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 30 00:11:48 crc kubenswrapper[4885]: W0130 00:11:48.690085 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podd94abf5b_36a7_4e56_9f8f_1bbc4f729676.slice/crio-aa780c5babaa742deb0bb1ddfd9ac07fd463eaa09a2508d39fe2e4de8581b5be WatchSource:0}: Error finding container aa780c5babaa742deb0bb1ddfd9ac07fd463eaa09a2508d39fe2e4de8581b5be: Status 404 returned error can't find the container with id aa780c5babaa742deb0bb1ddfd9ac07fd463eaa09a2508d39fe2e4de8581b5be Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.725335 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-qtxcm" event={"ID":"0eb0e632-fc50-4845-aa1b-4aab2bb7826b","Type":"ContainerStarted","Data":"43285d4f7db58590709221e7dd74530bc9c7e7b5941d892ce591c4a693452494"} Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.725840 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-qtxcm" Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.727464 4885 patch_prober.go:28] interesting pod/downloads-7954f5f757-qtxcm container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.727527 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-qtxcm" podUID="0eb0e632-fc50-4845-aa1b-4aab2bb7826b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.727552 4885 generic.go:334] "Generic (PLEG): container finished" podID="faedfaad-6883-471e-9a4e-d15cc6b969d7" containerID="598a1159349b664fa2fdf5b3082cd95cb899b3746c318cc793a5cd74f58c0bf2" exitCode=0 Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.727631 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gd928" event={"ID":"faedfaad-6883-471e-9a4e-d15cc6b969d7","Type":"ContainerDied","Data":"598a1159349b664fa2fdf5b3082cd95cb899b3746c318cc793a5cd74f58c0bf2"} Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.729813 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" event={"ID":"b0166258-aa22-4b69-89fa-2b617b7e5f2c","Type":"ContainerStarted","Data":"7e0a60eb079c248e49cc58dcc0a33782305e87abc8ceccdfb97dd03bf8f8a9fd"} Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.729871 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" event={"ID":"b0166258-aa22-4b69-89fa-2b617b7e5f2c","Type":"ContainerStarted","Data":"c3bbfb12ac0e7ef16ee3ac9d0e252951b675f0e2ce2b2c5b4eb789f806007500"} Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.729958 4885 kuberuntime_container.go:808] "Killing container with a 
grace period" pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" podUID="b0166258-aa22-4b69-89fa-2b617b7e5f2c" containerName="route-controller-manager" containerID="cri-o://7e0a60eb079c248e49cc58dcc0a33782305e87abc8ceccdfb97dd03bf8f8a9fd" gracePeriod=30 Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.730175 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.756510 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" event={"ID":"e3df54d6-7efc-412d-9b56-ac3806d2c49e","Type":"ContainerStarted","Data":"1083a5e555dfe9555399bf201f007fcbacba3aa7d112efa2d4b896e366af95c8"} Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.756573 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" event={"ID":"e3df54d6-7efc-412d-9b56-ac3806d2c49e","Type":"ContainerStarted","Data":"397af4bfdf720e576f7209efeacaa4f0819ff2fc0b77bfd25727c27fe34fc13f"} Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.756945 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" podUID="e3df54d6-7efc-412d-9b56-ac3806d2c49e" containerName="controller-manager" containerID="cri-o://1083a5e555dfe9555399bf201f007fcbacba3aa7d112efa2d4b896e366af95c8" gracePeriod=30 Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.758977 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.775841 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"d94abf5b-36a7-4e56-9f8f-1bbc4f729676","Type":"ContainerStarted","Data":"aa780c5babaa742deb0bb1ddfd9ac07fd463eaa09a2508d39fe2e4de8581b5be"} Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.777953 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"0bb2ca57-12f0-4653-b7fe-dc1935925eec","Type":"ContainerStarted","Data":"efe360e2364af6546c25385f8cd31430375d7df0489c7feccdbf6de925231396"} Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.777983 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"0bb2ca57-12f0-4653-b7fe-dc1935925eec","Type":"ContainerStarted","Data":"ee073f73a1374bffcaa34989565eb6e8421852a76579c51283b0cde1a67a49c0"} Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.781416 4885 patch_prober.go:28] interesting pod/controller-manager-64879fc986-8mgtg container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.55:8443/healthz\": read tcp 10.217.0.2:55908->10.217.0.55:8443: read: connection reset by peer" start-of-body= Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.781642 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" podUID="e3df54d6-7efc-412d-9b56-ac3806d2c49e" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.55:8443/healthz\": read tcp 10.217.0.2:55908->10.217.0.55:8443: read: connection reset by peer" Jan 30 
00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.786726 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-hg2nk" event={"ID":"313f7566-bae9-4b9c-8c30-9e3c7aef8364","Type":"ContainerStarted","Data":"efff806394378a417ebb71e4041600e70671b73e3eef50a7abaa87a1076e7bee"} Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.786927 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-hg2nk" event={"ID":"313f7566-bae9-4b9c-8c30-9e3c7aef8364","Type":"ContainerStarted","Data":"bf3683f7d3891e86ca8711b6339530b66d7cad0eb4fe30f3cb4ead1e9b4c5fe1"} Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.787072 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-hg2nk" event={"ID":"313f7566-bae9-4b9c-8c30-9e3c7aef8364","Type":"ContainerStarted","Data":"40a007348c9167ec626a11d759af1ae2be8df9aeaf6f4c5ef16665a3c38d05e9"} Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.789948 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" podStartSLOduration=26.78991746 podStartE2EDuration="26.78991746s" podCreationTimestamp="2026-01-30 00:11:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:48.789366995 +0000 UTC m=+195.380838753" watchObservedRunningTime="2026-01-30 00:11:48.78991746 +0000 UTC m=+195.381389208" Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.841235 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" podStartSLOduration=26.841211484 podStartE2EDuration="26.841211484s" podCreationTimestamp="2026-01-30 00:11:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:48.833058756 +0000 UTC m=+195.424530504" watchObservedRunningTime="2026-01-30 00:11:48.841211484 +0000 UTC m=+195.432683232" Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.854239 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=5.85375879 podStartE2EDuration="5.85375879s" podCreationTimestamp="2026-01-30 00:11:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:48.851006936 +0000 UTC m=+195.442478684" watchObservedRunningTime="2026-01-30 00:11:48.85375879 +0000 UTC m=+195.445230538" Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.873142 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-hg2nk" podStartSLOduration=169.873120489 podStartE2EDuration="2m49.873120489s" podCreationTimestamp="2026-01-30 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:48.866592244 +0000 UTC m=+195.458063982" watchObservedRunningTime="2026-01-30 00:11:48.873120489 +0000 UTC m=+195.464592237" Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.996548 4885 patch_prober.go:28] interesting pod/route-controller-manager-dddc87f8-254pl container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe 
status=failure output="Get \"https://10.217.0.56:8443/healthz\": read tcp 10.217.0.2:52358->10.217.0.56:8443: read: connection reset by peer" start-of-body= Jan 30 00:11:48 crc kubenswrapper[4885]: I0130 00:11:48.996632 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" podUID="b0166258-aa22-4b69-89fa-2b617b7e5f2c" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.56:8443/healthz\": read tcp 10.217.0.2:52358->10.217.0.56:8443: read: connection reset by peer" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.572170 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-dddc87f8-254pl_b0166258-aa22-4b69-89fa-2b617b7e5f2c/route-controller-manager/0.log" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.572685 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.613203 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h"] Jan 30 00:11:49 crc kubenswrapper[4885]: E0130 00:11:49.613547 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0166258-aa22-4b69-89fa-2b617b7e5f2c" containerName="route-controller-manager" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.613572 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0166258-aa22-4b69-89fa-2b617b7e5f2c" containerName="route-controller-manager" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.613796 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0166258-aa22-4b69-89fa-2b617b7e5f2c" containerName="route-controller-manager" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.614332 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.616206 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h"] Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.688273 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.700013 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b0166258-aa22-4b69-89fa-2b617b7e5f2c-serving-cert\") pod \"b0166258-aa22-4b69-89fa-2b617b7e5f2c\" (UID: \"b0166258-aa22-4b69-89fa-2b617b7e5f2c\") " Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.700138 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b0166258-aa22-4b69-89fa-2b617b7e5f2c-client-ca\") pod \"b0166258-aa22-4b69-89fa-2b617b7e5f2c\" (UID: \"b0166258-aa22-4b69-89fa-2b617b7e5f2c\") " Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.700170 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9x4kd\" (UniqueName: \"kubernetes.io/projected/b0166258-aa22-4b69-89fa-2b617b7e5f2c-kube-api-access-9x4kd\") pod \"b0166258-aa22-4b69-89fa-2b617b7e5f2c\" (UID: \"b0166258-aa22-4b69-89fa-2b617b7e5f2c\") " Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.701178 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b0166258-aa22-4b69-89fa-2b617b7e5f2c-client-ca" (OuterVolumeSpecName: "client-ca") pod "b0166258-aa22-4b69-89fa-2b617b7e5f2c" (UID: "b0166258-aa22-4b69-89fa-2b617b7e5f2c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.701253 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0166258-aa22-4b69-89fa-2b617b7e5f2c-config\") pod \"b0166258-aa22-4b69-89fa-2b617b7e5f2c\" (UID: \"b0166258-aa22-4b69-89fa-2b617b7e5f2c\") " Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.701688 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b0166258-aa22-4b69-89fa-2b617b7e5f2c-config" (OuterVolumeSpecName: "config") pod "b0166258-aa22-4b69-89fa-2b617b7e5f2c" (UID: "b0166258-aa22-4b69-89fa-2b617b7e5f2c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.702088 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-client-ca\") pod \"route-controller-manager-754dd49d58-ds48h\" (UID: \"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c\") " pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.702193 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfkmj\" (UniqueName: \"kubernetes.io/projected/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-kube-api-access-qfkmj\") pod \"route-controller-manager-754dd49d58-ds48h\" (UID: \"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c\") " pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.702287 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-config\") pod \"route-controller-manager-754dd49d58-ds48h\" (UID: \"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c\") " pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.702315 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-serving-cert\") pod \"route-controller-manager-754dd49d58-ds48h\" (UID: \"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c\") " pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.702364 4885 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b0166258-aa22-4b69-89fa-2b617b7e5f2c-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.702378 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0166258-aa22-4b69-89fa-2b617b7e5f2c-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.710406 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0166258-aa22-4b69-89fa-2b617b7e5f2c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b0166258-aa22-4b69-89fa-2b617b7e5f2c" (UID: "b0166258-aa22-4b69-89fa-2b617b7e5f2c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.710428 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0166258-aa22-4b69-89fa-2b617b7e5f2c-kube-api-access-9x4kd" (OuterVolumeSpecName: "kube-api-access-9x4kd") pod "b0166258-aa22-4b69-89fa-2b617b7e5f2c" (UID: "b0166258-aa22-4b69-89fa-2b617b7e5f2c"). InnerVolumeSpecName "kube-api-access-9x4kd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.795900 4885 generic.go:334] "Generic (PLEG): container finished" podID="0bb2ca57-12f0-4653-b7fe-dc1935925eec" containerID="efe360e2364af6546c25385f8cd31430375d7df0489c7feccdbf6de925231396" exitCode=0 Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.795982 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"0bb2ca57-12f0-4653-b7fe-dc1935925eec","Type":"ContainerDied","Data":"efe360e2364af6546c25385f8cd31430375d7df0489c7feccdbf6de925231396"} Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.798390 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"d94abf5b-36a7-4e56-9f8f-1bbc4f729676","Type":"ContainerStarted","Data":"7a9aaf1703b91aa981ba91143618ed1dd0fd8185d2c02d1969154a797fb64b54"} Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.802225 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-dddc87f8-254pl_b0166258-aa22-4b69-89fa-2b617b7e5f2c/route-controller-manager/0.log" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.802281 4885 generic.go:334] "Generic (PLEG): container finished" podID="b0166258-aa22-4b69-89fa-2b617b7e5f2c" containerID="7e0a60eb079c248e49cc58dcc0a33782305e87abc8ceccdfb97dd03bf8f8a9fd" exitCode=255 Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.802339 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" event={"ID":"b0166258-aa22-4b69-89fa-2b617b7e5f2c","Type":"ContainerDied","Data":"7e0a60eb079c248e49cc58dcc0a33782305e87abc8ceccdfb97dd03bf8f8a9fd"} Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.802367 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" event={"ID":"b0166258-aa22-4b69-89fa-2b617b7e5f2c","Type":"ContainerDied","Data":"c3bbfb12ac0e7ef16ee3ac9d0e252951b675f0e2ce2b2c5b4eb789f806007500"} Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.802395 4885 scope.go:117] "RemoveContainer" containerID="7e0a60eb079c248e49cc58dcc0a33782305e87abc8ceccdfb97dd03bf8f8a9fd" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.802560 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.807292 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3df54d6-7efc-412d-9b56-ac3806d2c49e-serving-cert\") pod \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\" (UID: \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\") " Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.809042 4885 generic.go:334] "Generic (PLEG): container finished" podID="e3df54d6-7efc-412d-9b56-ac3806d2c49e" containerID="1083a5e555dfe9555399bf201f007fcbacba3aa7d112efa2d4b896e366af95c8" exitCode=0 Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.809679 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3df54d6-7efc-412d-9b56-ac3806d2c49e-config\") pod \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\" (UID: \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\") " Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.809806 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e3df54d6-7efc-412d-9b56-ac3806d2c49e-proxy-ca-bundles\") pod \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\" (UID: \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\") " Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.809878 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jd4m\" (UniqueName: \"kubernetes.io/projected/e3df54d6-7efc-412d-9b56-ac3806d2c49e-kube-api-access-6jd4m\") pod \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\" (UID: \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\") " Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.809920 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e3df54d6-7efc-412d-9b56-ac3806d2c49e-client-ca\") pod \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\" (UID: \"e3df54d6-7efc-412d-9b56-ac3806d2c49e\") " Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.809961 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.810044 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" event={"ID":"e3df54d6-7efc-412d-9b56-ac3806d2c49e","Type":"ContainerDied","Data":"1083a5e555dfe9555399bf201f007fcbacba3aa7d112efa2d4b896e366af95c8"} Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.810076 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" event={"ID":"e3df54d6-7efc-412d-9b56-ac3806d2c49e","Type":"ContainerDied","Data":"397af4bfdf720e576f7209efeacaa4f0819ff2fc0b77bfd25727c27fe34fc13f"} Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.810196 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-config\") pod \"route-controller-manager-754dd49d58-ds48h\" (UID: \"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c\") " pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.810229 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-serving-cert\") pod \"route-controller-manager-754dd49d58-ds48h\" (UID: \"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c\") " pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.810471 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-client-ca\") pod \"route-controller-manager-754dd49d58-ds48h\" (UID: \"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c\") " pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.810552 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfkmj\" (UniqueName: \"kubernetes.io/projected/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-kube-api-access-qfkmj\") pod \"route-controller-manager-754dd49d58-ds48h\" (UID: \"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c\") " pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.811009 4885 patch_prober.go:28] interesting pod/downloads-7954f5f757-qtxcm container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.811065 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-qtxcm" podUID="0eb0e632-fc50-4845-aa1b-4aab2bb7826b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.811949 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3df54d6-7efc-412d-9b56-ac3806d2c49e-config" (OuterVolumeSpecName: "config") pod "e3df54d6-7efc-412d-9b56-ac3806d2c49e" (UID: "e3df54d6-7efc-412d-9b56-ac3806d2c49e"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.812885 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3df54d6-7efc-412d-9b56-ac3806d2c49e-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "e3df54d6-7efc-412d-9b56-ac3806d2c49e" (UID: "e3df54d6-7efc-412d-9b56-ac3806d2c49e"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.817025 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-config\") pod \"route-controller-manager-754dd49d58-ds48h\" (UID: \"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c\") " pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.817310 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3df54d6-7efc-412d-9b56-ac3806d2c49e-client-ca" (OuterVolumeSpecName: "client-ca") pod "e3df54d6-7efc-412d-9b56-ac3806d2c49e" (UID: "e3df54d6-7efc-412d-9b56-ac3806d2c49e"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.817957 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3df54d6-7efc-412d-9b56-ac3806d2c49e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e3df54d6-7efc-412d-9b56-ac3806d2c49e" (UID: "e3df54d6-7efc-412d-9b56-ac3806d2c49e"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.821023 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3df54d6-7efc-412d-9b56-ac3806d2c49e-kube-api-access-6jd4m" (OuterVolumeSpecName: "kube-api-access-6jd4m") pod "e3df54d6-7efc-412d-9b56-ac3806d2c49e" (UID: "e3df54d6-7efc-412d-9b56-ac3806d2c49e"). InnerVolumeSpecName "kube-api-access-6jd4m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.823262 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-client-ca\") pod \"route-controller-manager-754dd49d58-ds48h\" (UID: \"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c\") " pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.824425 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b0166258-aa22-4b69-89fa-2b617b7e5f2c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.824455 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9x4kd\" (UniqueName: \"kubernetes.io/projected/b0166258-aa22-4b69-89fa-2b617b7e5f2c-kube-api-access-9x4kd\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.832091 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-serving-cert\") pod \"route-controller-manager-754dd49d58-ds48h\" (UID: \"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c\") " pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.840825 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfkmj\" (UniqueName: \"kubernetes.io/projected/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-kube-api-access-qfkmj\") pod \"route-controller-manager-754dd49d58-ds48h\" (UID: \"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c\") " pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.845532 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=2.845508768 podStartE2EDuration="2.845508768s" podCreationTimestamp="2026-01-30 00:11:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:49.840514854 +0000 UTC m=+196.431986612" watchObservedRunningTime="2026-01-30 00:11:49.845508768 +0000 UTC m=+196.436980516" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.865129 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl"] Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.868844 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-dddc87f8-254pl"] Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.898172 4885 scope.go:117] "RemoveContainer" containerID="7e0a60eb079c248e49cc58dcc0a33782305e87abc8ceccdfb97dd03bf8f8a9fd" Jan 30 00:11:49 crc kubenswrapper[4885]: E0130 00:11:49.900072 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e0a60eb079c248e49cc58dcc0a33782305e87abc8ceccdfb97dd03bf8f8a9fd\": container with ID starting with 7e0a60eb079c248e49cc58dcc0a33782305e87abc8ceccdfb97dd03bf8f8a9fd not found: ID does not exist" containerID="7e0a60eb079c248e49cc58dcc0a33782305e87abc8ceccdfb97dd03bf8f8a9fd" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 
00:11:49.900131 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e0a60eb079c248e49cc58dcc0a33782305e87abc8ceccdfb97dd03bf8f8a9fd"} err="failed to get container status \"7e0a60eb079c248e49cc58dcc0a33782305e87abc8ceccdfb97dd03bf8f8a9fd\": rpc error: code = NotFound desc = could not find container \"7e0a60eb079c248e49cc58dcc0a33782305e87abc8ceccdfb97dd03bf8f8a9fd\": container with ID starting with 7e0a60eb079c248e49cc58dcc0a33782305e87abc8ceccdfb97dd03bf8f8a9fd not found: ID does not exist" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.900166 4885 scope.go:117] "RemoveContainer" containerID="1083a5e555dfe9555399bf201f007fcbacba3aa7d112efa2d4b896e366af95c8" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.916268 4885 scope.go:117] "RemoveContainer" containerID="1083a5e555dfe9555399bf201f007fcbacba3aa7d112efa2d4b896e366af95c8" Jan 30 00:11:49 crc kubenswrapper[4885]: E0130 00:11:49.917341 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1083a5e555dfe9555399bf201f007fcbacba3aa7d112efa2d4b896e366af95c8\": container with ID starting with 1083a5e555dfe9555399bf201f007fcbacba3aa7d112efa2d4b896e366af95c8 not found: ID does not exist" containerID="1083a5e555dfe9555399bf201f007fcbacba3aa7d112efa2d4b896e366af95c8" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.917466 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1083a5e555dfe9555399bf201f007fcbacba3aa7d112efa2d4b896e366af95c8"} err="failed to get container status \"1083a5e555dfe9555399bf201f007fcbacba3aa7d112efa2d4b896e366af95c8\": rpc error: code = NotFound desc = could not find container \"1083a5e555dfe9555399bf201f007fcbacba3aa7d112efa2d4b896e366af95c8\": container with ID starting with 1083a5e555dfe9555399bf201f007fcbacba3aa7d112efa2d4b896e366af95c8 not found: ID does not exist" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.926166 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3df54d6-7efc-412d-9b56-ac3806d2c49e-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.927375 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3df54d6-7efc-412d-9b56-ac3806d2c49e-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.927495 4885 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e3df54d6-7efc-412d-9b56-ac3806d2c49e-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.927580 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jd4m\" (UniqueName: \"kubernetes.io/projected/e3df54d6-7efc-412d-9b56-ac3806d2c49e-kube-api-access-6jd4m\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.927856 4885 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e3df54d6-7efc-412d-9b56-ac3806d2c49e-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:49 crc kubenswrapper[4885]: I0130 00:11:49.932045 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" Jan 30 00:11:50 crc kubenswrapper[4885]: I0130 00:11:50.151881 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0166258-aa22-4b69-89fa-2b617b7e5f2c" path="/var/lib/kubelet/pods/b0166258-aa22-4b69-89fa-2b617b7e5f2c/volumes" Jan 30 00:11:50 crc kubenswrapper[4885]: I0130 00:11:50.153828 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-64879fc986-8mgtg"] Jan 30 00:11:50 crc kubenswrapper[4885]: I0130 00:11:50.157306 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-64879fc986-8mgtg"] Jan 30 00:11:50 crc kubenswrapper[4885]: I0130 00:11:50.172163 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h"] Jan 30 00:11:50 crc kubenswrapper[4885]: W0130 00:11:50.179848 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod96a8dcc3_6c8d_43bb_a380_8d98fa09e13c.slice/crio-9261b45046e4a956f887e2911aaf4cf5ccc2cf43e1657f91775d6acc3cf004c9 WatchSource:0}: Error finding container 9261b45046e4a956f887e2911aaf4cf5ccc2cf43e1657f91775d6acc3cf004c9: Status 404 returned error can't find the container with id 9261b45046e4a956f887e2911aaf4cf5ccc2cf43e1657f91775d6acc3cf004c9 Jan 30 00:11:50 crc kubenswrapper[4885]: I0130 00:11:50.565540 4885 patch_prober.go:28] interesting pod/controller-manager-64879fc986-8mgtg container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.55:8443/healthz\": dial tcp 10.217.0.55:8443: i/o timeout" start-of-body= Jan 30 00:11:50 crc kubenswrapper[4885]: I0130 00:11:50.566069 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-64879fc986-8mgtg" podUID="e3df54d6-7efc-412d-9b56-ac3806d2c49e" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.55:8443/healthz\": dial tcp 10.217.0.55:8443: i/o timeout" Jan 30 00:11:50 crc kubenswrapper[4885]: I0130 00:11:50.821790 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gd928" event={"ID":"faedfaad-6883-471e-9a4e-d15cc6b969d7","Type":"ContainerStarted","Data":"cbbbab32edb42b7f316c1a84e4a8aa19ff6c768177f1bbdc06371a2d9b9798c8"} Jan 30 00:11:50 crc kubenswrapper[4885]: I0130 00:11:50.824740 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" event={"ID":"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c","Type":"ContainerStarted","Data":"1c7bd5d0e5ae5f34ff4a5912dcdfdc2d571fe061d194bd6cbca17b60ea3d170a"} Jan 30 00:11:50 crc kubenswrapper[4885]: I0130 00:11:50.824820 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" event={"ID":"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c","Type":"ContainerStarted","Data":"9261b45046e4a956f887e2911aaf4cf5ccc2cf43e1657f91775d6acc3cf004c9"} Jan 30 00:11:50 crc kubenswrapper[4885]: I0130 00:11:50.857012 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gd928" podStartSLOduration=2.946588524 podStartE2EDuration="43.856993384s" podCreationTimestamp="2026-01-30 00:11:07 +0000 UTC" 
firstStartedPulling="2026-01-30 00:11:08.987647936 +0000 UTC m=+155.579119684" lastFinishedPulling="2026-01-30 00:11:49.898052796 +0000 UTC m=+196.489524544" observedRunningTime="2026-01-30 00:11:50.852284389 +0000 UTC m=+197.443756137" watchObservedRunningTime="2026-01-30 00:11:50.856993384 +0000 UTC m=+197.448465132" Jan 30 00:11:51 crc kubenswrapper[4885]: I0130 00:11:51.075917 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 00:11:51 crc kubenswrapper[4885]: I0130 00:11:51.096565 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" podStartSLOduration=9.096535602 podStartE2EDuration="9.096535602s" podCreationTimestamp="2026-01-30 00:11:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:50.879035275 +0000 UTC m=+197.470507023" watchObservedRunningTime="2026-01-30 00:11:51.096535602 +0000 UTC m=+197.688007390" Jan 30 00:11:51 crc kubenswrapper[4885]: I0130 00:11:51.157063 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0bb2ca57-12f0-4653-b7fe-dc1935925eec-kubelet-dir\") pod \"0bb2ca57-12f0-4653-b7fe-dc1935925eec\" (UID: \"0bb2ca57-12f0-4653-b7fe-dc1935925eec\") " Jan 30 00:11:51 crc kubenswrapper[4885]: I0130 00:11:51.157159 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0bb2ca57-12f0-4653-b7fe-dc1935925eec-kube-api-access\") pod \"0bb2ca57-12f0-4653-b7fe-dc1935925eec\" (UID: \"0bb2ca57-12f0-4653-b7fe-dc1935925eec\") " Jan 30 00:11:51 crc kubenswrapper[4885]: I0130 00:11:51.157151 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0bb2ca57-12f0-4653-b7fe-dc1935925eec-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "0bb2ca57-12f0-4653-b7fe-dc1935925eec" (UID: "0bb2ca57-12f0-4653-b7fe-dc1935925eec"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:11:51 crc kubenswrapper[4885]: I0130 00:11:51.157628 4885 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0bb2ca57-12f0-4653-b7fe-dc1935925eec-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:51 crc kubenswrapper[4885]: I0130 00:11:51.190409 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bb2ca57-12f0-4653-b7fe-dc1935925eec-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0bb2ca57-12f0-4653-b7fe-dc1935925eec" (UID: "0bb2ca57-12f0-4653-b7fe-dc1935925eec"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:11:51 crc kubenswrapper[4885]: I0130 00:11:51.258722 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0bb2ca57-12f0-4653-b7fe-dc1935925eec-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 30 00:11:51 crc kubenswrapper[4885]: I0130 00:11:51.837238 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"0bb2ca57-12f0-4653-b7fe-dc1935925eec","Type":"ContainerDied","Data":"ee073f73a1374bffcaa34989565eb6e8421852a76579c51283b0cde1a67a49c0"} Jan 30 00:11:51 crc kubenswrapper[4885]: I0130 00:11:51.837497 4885 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ee073f73a1374bffcaa34989565eb6e8421852a76579c51283b0cde1a67a49c0" Jan 30 00:11:51 crc kubenswrapper[4885]: I0130 00:11:51.837370 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 30 00:11:51 crc kubenswrapper[4885]: I0130 00:11:51.837825 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" Jan 30 00:11:51 crc kubenswrapper[4885]: I0130 00:11:51.846341 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.151016 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3df54d6-7efc-412d-9b56-ac3806d2c49e" path="/var/lib/kubelet/pods/e3df54d6-7efc-412d-9b56-ac3806d2c49e/volumes" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.528190 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7f7c7b995d-59scj"] Jan 30 00:11:52 crc kubenswrapper[4885]: E0130 00:11:52.528504 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3df54d6-7efc-412d-9b56-ac3806d2c49e" containerName="controller-manager" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.528522 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3df54d6-7efc-412d-9b56-ac3806d2c49e" containerName="controller-manager" Jan 30 00:11:52 crc kubenswrapper[4885]: E0130 00:11:52.528534 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bb2ca57-12f0-4653-b7fe-dc1935925eec" containerName="pruner" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.528540 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bb2ca57-12f0-4653-b7fe-dc1935925eec" containerName="pruner" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.528645 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bb2ca57-12f0-4653-b7fe-dc1935925eec" containerName="pruner" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.528658 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3df54d6-7efc-412d-9b56-ac3806d2c49e" containerName="controller-manager" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.529086 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.533126 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.533632 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.534040 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.534262 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.534411 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.534907 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.542421 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.549873 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f7c7b995d-59scj"] Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.577299 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/738fdbfc-508c-4141-8932-e0d7f94e8e27-proxy-ca-bundles\") pod \"controller-manager-7f7c7b995d-59scj\" (UID: \"738fdbfc-508c-4141-8932-e0d7f94e8e27\") " pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.577362 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/738fdbfc-508c-4141-8932-e0d7f94e8e27-client-ca\") pod \"controller-manager-7f7c7b995d-59scj\" (UID: \"738fdbfc-508c-4141-8932-e0d7f94e8e27\") " pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.577420 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rrj4\" (UniqueName: \"kubernetes.io/projected/738fdbfc-508c-4141-8932-e0d7f94e8e27-kube-api-access-8rrj4\") pod \"controller-manager-7f7c7b995d-59scj\" (UID: \"738fdbfc-508c-4141-8932-e0d7f94e8e27\") " pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.577462 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/738fdbfc-508c-4141-8932-e0d7f94e8e27-serving-cert\") pod \"controller-manager-7f7c7b995d-59scj\" (UID: \"738fdbfc-508c-4141-8932-e0d7f94e8e27\") " pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.577491 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/738fdbfc-508c-4141-8932-e0d7f94e8e27-config\") pod \"controller-manager-7f7c7b995d-59scj\" (UID: \"738fdbfc-508c-4141-8932-e0d7f94e8e27\") " pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.678820 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/738fdbfc-508c-4141-8932-e0d7f94e8e27-client-ca\") pod \"controller-manager-7f7c7b995d-59scj\" (UID: \"738fdbfc-508c-4141-8932-e0d7f94e8e27\") " pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.678915 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rrj4\" (UniqueName: \"kubernetes.io/projected/738fdbfc-508c-4141-8932-e0d7f94e8e27-kube-api-access-8rrj4\") pod \"controller-manager-7f7c7b995d-59scj\" (UID: \"738fdbfc-508c-4141-8932-e0d7f94e8e27\") " pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.678965 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/738fdbfc-508c-4141-8932-e0d7f94e8e27-serving-cert\") pod \"controller-manager-7f7c7b995d-59scj\" (UID: \"738fdbfc-508c-4141-8932-e0d7f94e8e27\") " pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.678990 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/738fdbfc-508c-4141-8932-e0d7f94e8e27-config\") pod \"controller-manager-7f7c7b995d-59scj\" (UID: \"738fdbfc-508c-4141-8932-e0d7f94e8e27\") " pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.679032 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/738fdbfc-508c-4141-8932-e0d7f94e8e27-proxy-ca-bundles\") pod \"controller-manager-7f7c7b995d-59scj\" (UID: \"738fdbfc-508c-4141-8932-e0d7f94e8e27\") " pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.680182 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/738fdbfc-508c-4141-8932-e0d7f94e8e27-proxy-ca-bundles\") pod \"controller-manager-7f7c7b995d-59scj\" (UID: \"738fdbfc-508c-4141-8932-e0d7f94e8e27\") " pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.680754 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/738fdbfc-508c-4141-8932-e0d7f94e8e27-config\") pod \"controller-manager-7f7c7b995d-59scj\" (UID: \"738fdbfc-508c-4141-8932-e0d7f94e8e27\") " pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.686507 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/738fdbfc-508c-4141-8932-e0d7f94e8e27-serving-cert\") pod \"controller-manager-7f7c7b995d-59scj\" (UID: \"738fdbfc-508c-4141-8932-e0d7f94e8e27\") " 
pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.690925 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/738fdbfc-508c-4141-8932-e0d7f94e8e27-client-ca\") pod \"controller-manager-7f7c7b995d-59scj\" (UID: \"738fdbfc-508c-4141-8932-e0d7f94e8e27\") " pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.701742 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rrj4\" (UniqueName: \"kubernetes.io/projected/738fdbfc-508c-4141-8932-e0d7f94e8e27-kube-api-access-8rrj4\") pod \"controller-manager-7f7c7b995d-59scj\" (UID: \"738fdbfc-508c-4141-8932-e0d7f94e8e27\") " pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" Jan 30 00:11:52 crc kubenswrapper[4885]: I0130 00:11:52.845693 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" Jan 30 00:11:53 crc kubenswrapper[4885]: I0130 00:11:53.054022 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f7c7b995d-59scj"] Jan 30 00:11:53 crc kubenswrapper[4885]: I0130 00:11:53.848593 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" event={"ID":"738fdbfc-508c-4141-8932-e0d7f94e8e27","Type":"ContainerStarted","Data":"16ee39fee72e8f39b95fcb0a0a7c8bb26e72e4f3eda56a6371e21cab24f98d00"} Jan 30 00:11:53 crc kubenswrapper[4885]: I0130 00:11:53.849010 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" event={"ID":"738fdbfc-508c-4141-8932-e0d7f94e8e27","Type":"ContainerStarted","Data":"ddd628dda2fa88759fa1813a6d26d31b9f45929ba97de346a5ab7c89f3f19b78"} Jan 30 00:11:53 crc kubenswrapper[4885]: I0130 00:11:53.867517 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" podStartSLOduration=11.867495692 podStartE2EDuration="11.867495692s" podCreationTimestamp="2026-01-30 00:11:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:11:53.866042073 +0000 UTC m=+200.457513851" watchObservedRunningTime="2026-01-30 00:11:53.867495692 +0000 UTC m=+200.458967440" Jan 30 00:11:54 crc kubenswrapper[4885]: I0130 00:11:54.854724 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" Jan 30 00:11:54 crc kubenswrapper[4885]: I0130 00:11:54.862010 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" Jan 30 00:11:55 crc kubenswrapper[4885]: I0130 00:11:55.149298 4885 patch_prober.go:28] interesting pod/downloads-7954f5f757-qtxcm container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Jan 30 00:11:55 crc kubenswrapper[4885]: I0130 00:11:55.149332 4885 patch_prober.go:28] interesting pod/downloads-7954f5f757-qtxcm container/download-server namespace/openshift-console: Liveness probe status=failure output="Get 
\"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Jan 30 00:11:55 crc kubenswrapper[4885]: I0130 00:11:55.149368 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-qtxcm" podUID="0eb0e632-fc50-4845-aa1b-4aab2bb7826b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Jan 30 00:11:55 crc kubenswrapper[4885]: I0130 00:11:55.149417 4885 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-qtxcm" podUID="0eb0e632-fc50-4845-aa1b-4aab2bb7826b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Jan 30 00:11:57 crc kubenswrapper[4885]: I0130 00:11:57.469915 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gd928" Jan 30 00:11:57 crc kubenswrapper[4885]: I0130 00:11:57.469993 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gd928" Jan 30 00:11:58 crc kubenswrapper[4885]: I0130 00:11:58.677474 4885 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gd928" podUID="faedfaad-6883-471e-9a4e-d15cc6b969d7" containerName="registry-server" probeResult="failure" output=< Jan 30 00:11:58 crc kubenswrapper[4885]: timeout: failed to connect service ":50051" within 1s Jan 30 00:11:58 crc kubenswrapper[4885]: > Jan 30 00:12:00 crc kubenswrapper[4885]: I0130 00:12:00.143521 4885 patch_prober.go:28] interesting pod/machine-config-daemon-bmd5j container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 00:12:00 crc kubenswrapper[4885]: I0130 00:12:00.144206 4885 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 00:12:00 crc kubenswrapper[4885]: I0130 00:12:00.153698 4885 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" Jan 30 00:12:00 crc kubenswrapper[4885]: I0130 00:12:00.155035 4885 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339"} pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 00:12:00 crc kubenswrapper[4885]: I0130 00:12:00.155184 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" containerID="cri-o://f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339" gracePeriod=600 Jan 30 00:12:00 crc kubenswrapper[4885]: I0130 00:12:00.891631 4885 generic.go:334] "Generic (PLEG): container finished" 
podID="41b99e9c-eadb-404c-9596-1b102ac85157" containerID="f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339" exitCode=0 Jan 30 00:12:00 crc kubenswrapper[4885]: I0130 00:12:00.891708 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" event={"ID":"41b99e9c-eadb-404c-9596-1b102ac85157","Type":"ContainerDied","Data":"f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339"} Jan 30 00:12:02 crc kubenswrapper[4885]: I0130 00:12:02.914449 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7f7c7b995d-59scj"] Jan 30 00:12:02 crc kubenswrapper[4885]: I0130 00:12:02.915258 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" podUID="738fdbfc-508c-4141-8932-e0d7f94e8e27" containerName="controller-manager" containerID="cri-o://16ee39fee72e8f39b95fcb0a0a7c8bb26e72e4f3eda56a6371e21cab24f98d00" gracePeriod=30 Jan 30 00:12:02 crc kubenswrapper[4885]: I0130 00:12:02.964591 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h"] Jan 30 00:12:02 crc kubenswrapper[4885]: I0130 00:12:02.964893 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" podUID="96a8dcc3-6c8d-43bb-a380-8d98fa09e13c" containerName="route-controller-manager" containerID="cri-o://1c7bd5d0e5ae5f34ff4a5912dcdfdc2d571fe061d194bd6cbca17b60ea3d170a" gracePeriod=30 Jan 30 00:12:03 crc kubenswrapper[4885]: I0130 00:12:03.917284 4885 generic.go:334] "Generic (PLEG): container finished" podID="738fdbfc-508c-4141-8932-e0d7f94e8e27" containerID="16ee39fee72e8f39b95fcb0a0a7c8bb26e72e4f3eda56a6371e21cab24f98d00" exitCode=0 Jan 30 00:12:03 crc kubenswrapper[4885]: I0130 00:12:03.917389 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" event={"ID":"738fdbfc-508c-4141-8932-e0d7f94e8e27","Type":"ContainerDied","Data":"16ee39fee72e8f39b95fcb0a0a7c8bb26e72e4f3eda56a6371e21cab24f98d00"} Jan 30 00:12:03 crc kubenswrapper[4885]: I0130 00:12:03.923815 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" event={"ID":"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c","Type":"ContainerDied","Data":"1c7bd5d0e5ae5f34ff4a5912dcdfdc2d571fe061d194bd6cbca17b60ea3d170a"} Jan 30 00:12:03 crc kubenswrapper[4885]: I0130 00:12:03.924002 4885 generic.go:334] "Generic (PLEG): container finished" podID="96a8dcc3-6c8d-43bb-a380-8d98fa09e13c" containerID="1c7bd5d0e5ae5f34ff4a5912dcdfdc2d571fe061d194bd6cbca17b60ea3d170a" exitCode=0 Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.208129 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-p87kw"] Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.560350 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.603375 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w"] Jan 30 00:12:04 crc kubenswrapper[4885]: E0130 00:12:04.603667 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96a8dcc3-6c8d-43bb-a380-8d98fa09e13c" containerName="route-controller-manager" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.603686 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="96a8dcc3-6c8d-43bb-a380-8d98fa09e13c" containerName="route-controller-manager" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.603833 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="96a8dcc3-6c8d-43bb-a380-8d98fa09e13c" containerName="route-controller-manager" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.604365 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.605326 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.623735 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w"] Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.678757 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-client-ca\") pod \"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c\" (UID: \"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c\") " Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.678911 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-serving-cert\") pod \"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c\" (UID: \"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c\") " Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.678961 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfkmj\" (UniqueName: \"kubernetes.io/projected/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-kube-api-access-qfkmj\") pod \"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c\" (UID: \"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c\") " Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.678992 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-config\") pod \"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c\" (UID: \"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c\") " Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.682064 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-client-ca" (OuterVolumeSpecName: "client-ca") pod "96a8dcc3-6c8d-43bb-a380-8d98fa09e13c" (UID: "96a8dcc3-6c8d-43bb-a380-8d98fa09e13c"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.682076 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-config" (OuterVolumeSpecName: "config") pod "96a8dcc3-6c8d-43bb-a380-8d98fa09e13c" (UID: "96a8dcc3-6c8d-43bb-a380-8d98fa09e13c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.689997 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-kube-api-access-qfkmj" (OuterVolumeSpecName: "kube-api-access-qfkmj") pod "96a8dcc3-6c8d-43bb-a380-8d98fa09e13c" (UID: "96a8dcc3-6c8d-43bb-a380-8d98fa09e13c"). InnerVolumeSpecName "kube-api-access-qfkmj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.699027 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "96a8dcc3-6c8d-43bb-a380-8d98fa09e13c" (UID: "96a8dcc3-6c8d-43bb-a380-8d98fa09e13c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.779749 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rrj4\" (UniqueName: \"kubernetes.io/projected/738fdbfc-508c-4141-8932-e0d7f94e8e27-kube-api-access-8rrj4\") pod \"738fdbfc-508c-4141-8932-e0d7f94e8e27\" (UID: \"738fdbfc-508c-4141-8932-e0d7f94e8e27\") " Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.780128 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/738fdbfc-508c-4141-8932-e0d7f94e8e27-proxy-ca-bundles\") pod \"738fdbfc-508c-4141-8932-e0d7f94e8e27\" (UID: \"738fdbfc-508c-4141-8932-e0d7f94e8e27\") " Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.780219 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/738fdbfc-508c-4141-8932-e0d7f94e8e27-config\") pod \"738fdbfc-508c-4141-8932-e0d7f94e8e27\" (UID: \"738fdbfc-508c-4141-8932-e0d7f94e8e27\") " Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.780296 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/738fdbfc-508c-4141-8932-e0d7f94e8e27-serving-cert\") pod \"738fdbfc-508c-4141-8932-e0d7f94e8e27\" (UID: \"738fdbfc-508c-4141-8932-e0d7f94e8e27\") " Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.780382 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/738fdbfc-508c-4141-8932-e0d7f94e8e27-client-ca\") pod \"738fdbfc-508c-4141-8932-e0d7f94e8e27\" (UID: \"738fdbfc-508c-4141-8932-e0d7f94e8e27\") " Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.780633 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82414781-e2d1-4742-9758-b2bdd1dc8061-serving-cert\") pod \"route-controller-manager-7f6f76dccb-kt84w\" (UID: \"82414781-e2d1-4742-9758-b2bdd1dc8061\") " 
pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.780725 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82414781-e2d1-4742-9758-b2bdd1dc8061-config\") pod \"route-controller-manager-7f6f76dccb-kt84w\" (UID: \"82414781-e2d1-4742-9758-b2bdd1dc8061\") " pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.780835 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfp4d\" (UniqueName: \"kubernetes.io/projected/82414781-e2d1-4742-9758-b2bdd1dc8061-kube-api-access-bfp4d\") pod \"route-controller-manager-7f6f76dccb-kt84w\" (UID: \"82414781-e2d1-4742-9758-b2bdd1dc8061\") " pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.780921 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/82414781-e2d1-4742-9758-b2bdd1dc8061-client-ca\") pod \"route-controller-manager-7f6f76dccb-kt84w\" (UID: \"82414781-e2d1-4742-9758-b2bdd1dc8061\") " pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.781059 4885 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.781123 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.782304 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfkmj\" (UniqueName: \"kubernetes.io/projected/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-kube-api-access-qfkmj\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.782382 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.781756 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/738fdbfc-508c-4141-8932-e0d7f94e8e27-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "738fdbfc-508c-4141-8932-e0d7f94e8e27" (UID: "738fdbfc-508c-4141-8932-e0d7f94e8e27"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.782272 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/738fdbfc-508c-4141-8932-e0d7f94e8e27-client-ca" (OuterVolumeSpecName: "client-ca") pod "738fdbfc-508c-4141-8932-e0d7f94e8e27" (UID: "738fdbfc-508c-4141-8932-e0d7f94e8e27"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.782372 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/738fdbfc-508c-4141-8932-e0d7f94e8e27-config" (OuterVolumeSpecName: "config") pod "738fdbfc-508c-4141-8932-e0d7f94e8e27" (UID: "738fdbfc-508c-4141-8932-e0d7f94e8e27"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.787494 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/738fdbfc-508c-4141-8932-e0d7f94e8e27-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "738fdbfc-508c-4141-8932-e0d7f94e8e27" (UID: "738fdbfc-508c-4141-8932-e0d7f94e8e27"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.787591 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/738fdbfc-508c-4141-8932-e0d7f94e8e27-kube-api-access-8rrj4" (OuterVolumeSpecName: "kube-api-access-8rrj4") pod "738fdbfc-508c-4141-8932-e0d7f94e8e27" (UID: "738fdbfc-508c-4141-8932-e0d7f94e8e27"). InnerVolumeSpecName "kube-api-access-8rrj4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.883721 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82414781-e2d1-4742-9758-b2bdd1dc8061-serving-cert\") pod \"route-controller-manager-7f6f76dccb-kt84w\" (UID: \"82414781-e2d1-4742-9758-b2bdd1dc8061\") " pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.884173 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82414781-e2d1-4742-9758-b2bdd1dc8061-config\") pod \"route-controller-manager-7f6f76dccb-kt84w\" (UID: \"82414781-e2d1-4742-9758-b2bdd1dc8061\") " pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.884200 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfp4d\" (UniqueName: \"kubernetes.io/projected/82414781-e2d1-4742-9758-b2bdd1dc8061-kube-api-access-bfp4d\") pod \"route-controller-manager-7f6f76dccb-kt84w\" (UID: \"82414781-e2d1-4742-9758-b2bdd1dc8061\") " pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.884231 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/82414781-e2d1-4742-9758-b2bdd1dc8061-client-ca\") pod \"route-controller-manager-7f6f76dccb-kt84w\" (UID: \"82414781-e2d1-4742-9758-b2bdd1dc8061\") " pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.884317 4885 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/738fdbfc-508c-4141-8932-e0d7f94e8e27-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.884331 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rrj4\" (UniqueName: 
\"kubernetes.io/projected/738fdbfc-508c-4141-8932-e0d7f94e8e27-kube-api-access-8rrj4\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.884342 4885 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/738fdbfc-508c-4141-8932-e0d7f94e8e27-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.884351 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/738fdbfc-508c-4141-8932-e0d7f94e8e27-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.884359 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/738fdbfc-508c-4141-8932-e0d7f94e8e27-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.885527 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/82414781-e2d1-4742-9758-b2bdd1dc8061-client-ca\") pod \"route-controller-manager-7f6f76dccb-kt84w\" (UID: \"82414781-e2d1-4742-9758-b2bdd1dc8061\") " pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.885709 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82414781-e2d1-4742-9758-b2bdd1dc8061-config\") pod \"route-controller-manager-7f6f76dccb-kt84w\" (UID: \"82414781-e2d1-4742-9758-b2bdd1dc8061\") " pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.887161 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82414781-e2d1-4742-9758-b2bdd1dc8061-serving-cert\") pod \"route-controller-manager-7f6f76dccb-kt84w\" (UID: \"82414781-e2d1-4742-9758-b2bdd1dc8061\") " pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.905375 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfp4d\" (UniqueName: \"kubernetes.io/projected/82414781-e2d1-4742-9758-b2bdd1dc8061-kube-api-access-bfp4d\") pod \"route-controller-manager-7f6f76dccb-kt84w\" (UID: \"82414781-e2d1-4742-9758-b2bdd1dc8061\") " pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.932550 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wng4c" event={"ID":"7f0d9e5f-67f9-4f87-8546-8e12d68513e9","Type":"ContainerStarted","Data":"b9e50f0787cc89dd40de252c4499c95101909ab0a59e2b4dd4b160921bb60b88"} Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.934548 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj" event={"ID":"738fdbfc-508c-4141-8932-e0d7f94e8e27","Type":"ContainerDied","Data":"ddd628dda2fa88759fa1813a6d26d31b9f45929ba97de346a5ab7c89f3f19b78"} Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.934647 4885 scope.go:117] "RemoveContainer" containerID="16ee39fee72e8f39b95fcb0a0a7c8bb26e72e4f3eda56a6371e21cab24f98d00" Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.934827 4885 util.go:48] "No ready sandbox for 
pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f7c7b995d-59scj"
Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.935757 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w"
Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.941162 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h" event={"ID":"96a8dcc3-6c8d-43bb-a380-8d98fa09e13c","Type":"ContainerDied","Data":"9261b45046e4a956f887e2911aaf4cf5ccc2cf43e1657f91775d6acc3cf004c9"}
Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.941335 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h"
Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.943669 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" event={"ID":"41b99e9c-eadb-404c-9596-1b102ac85157","Type":"ContainerStarted","Data":"d0446ae30b16adbc17a7c39638b35405117acdb3790f74fd73290c8db7c38e19"}
Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.945904 4885 generic.go:334] "Generic (PLEG): container finished" podID="fdd3cba5-cf61-40cd-8c88-d289887fbf8a" containerID="b7592d4755c18b55b3253e368fc6a98aed8ffa114bdaa4bcf75e3c723949a62d" exitCode=0
Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.946089 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7fh97" event={"ID":"fdd3cba5-cf61-40cd-8c88-d289887fbf8a","Type":"ContainerDied","Data":"b7592d4755c18b55b3253e368fc6a98aed8ffa114bdaa4bcf75e3c723949a62d"}
Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.950732 4885 generic.go:334] "Generic (PLEG): container finished" podID="dffca359-3f77-47e0-999e-ec7b5d72176d" containerID="5ec6cd48b4011c36d3be8a7b7465fbd07428b401164c87ac6f721b3bf71b1e82" exitCode=0
Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.950979 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jmbh5" event={"ID":"dffca359-3f77-47e0-999e-ec7b5d72176d","Type":"ContainerDied","Data":"5ec6cd48b4011c36d3be8a7b7465fbd07428b401164c87ac6f721b3bf71b1e82"}
Jan 30 00:12:04 crc kubenswrapper[4885]: I0130 00:12:04.954642 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n4nxr" event={"ID":"ab0a81e5-1af3-4340-a412-b0ee0d506468","Type":"ContainerStarted","Data":"141e08e7d89ac8e919208c1c7db55ee46fe0f9a6d855c1ed1488159b4e63ecb5"}
Jan 30 00:12:05 crc kubenswrapper[4885]: I0130 00:12:05.099444 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h"]
Jan 30 00:12:05 crc kubenswrapper[4885]: I0130 00:12:05.103132 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-754dd49d58-ds48h"]
Jan 30 00:12:05 crc kubenswrapper[4885]: I0130 00:12:05.113968 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7f7c7b995d-59scj"]
Jan 30 00:12:05 crc kubenswrapper[4885]: I0130 00:12:05.117038 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7f7c7b995d-59scj"]
Jan 30 00:12:05 crc kubenswrapper[4885]: I0130 00:12:05.153821 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-qtxcm"
Jan 30 00:12:05 crc kubenswrapper[4885]: I0130 00:12:05.476009 4885 scope.go:117] "RemoveContainer" containerID="1c7bd5d0e5ae5f34ff4a5912dcdfdc2d571fe061d194bd6cbca17b60ea3d170a"
Jan 30 00:12:05 crc kubenswrapper[4885]: I0130 00:12:05.963689 4885 generic.go:334] "Generic (PLEG): container finished" podID="ab0a81e5-1af3-4340-a412-b0ee0d506468" containerID="141e08e7d89ac8e919208c1c7db55ee46fe0f9a6d855c1ed1488159b4e63ecb5" exitCode=0
Jan 30 00:12:05 crc kubenswrapper[4885]: I0130 00:12:05.963787 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n4nxr" event={"ID":"ab0a81e5-1af3-4340-a412-b0ee0d506468","Type":"ContainerDied","Data":"141e08e7d89ac8e919208c1c7db55ee46fe0f9a6d855c1ed1488159b4e63ecb5"}
Jan 30 00:12:06 crc kubenswrapper[4885]: I0130 00:12:06.155997 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="738fdbfc-508c-4141-8932-e0d7f94e8e27" path="/var/lib/kubelet/pods/738fdbfc-508c-4141-8932-e0d7f94e8e27/volumes"
Jan 30 00:12:06 crc kubenswrapper[4885]: I0130 00:12:06.157322 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96a8dcc3-6c8d-43bb-a380-8d98fa09e13c" path="/var/lib/kubelet/pods/96a8dcc3-6c8d-43bb-a380-8d98fa09e13c/volumes"
Jan 30 00:12:06 crc kubenswrapper[4885]: E0130 00:12:06.190757 4885 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f0d9e5f_67f9_4f87_8546_8e12d68513e9.slice/crio-conmon-b9e50f0787cc89dd40de252c4499c95101909ab0a59e2b4dd4b160921bb60b88.scope\": RecentStats: unable to find data in memory cache]"
Jan 30 00:12:06 crc kubenswrapper[4885]: I0130 00:12:06.986644 4885 generic.go:334] "Generic (PLEG): container finished" podID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" containerID="b9e50f0787cc89dd40de252c4499c95101909ab0a59e2b4dd4b160921bb60b88" exitCode=0
Jan 30 00:12:06 crc kubenswrapper[4885]: I0130 00:12:06.987365 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wng4c" event={"ID":"7f0d9e5f-67f9-4f87-8546-8e12d68513e9","Type":"ContainerDied","Data":"b9e50f0787cc89dd40de252c4499c95101909ab0a59e2b4dd4b160921bb60b88"}
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.202798 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w"]
Jan 30 00:12:07 crc kubenswrapper[4885]: W0130 00:12:07.207197 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod82414781_e2d1_4742_9758_b2bdd1dc8061.slice/crio-a0e383bc373242d3ec43807583aa6fa6b42b33be6ae828d077e9c90d23577744 WatchSource:0}: Error finding container a0e383bc373242d3ec43807583aa6fa6b42b33be6ae828d077e9c90d23577744: Status 404 returned error can't find the container with id a0e383bc373242d3ec43807583aa6fa6b42b33be6ae828d077e9c90d23577744
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.532337 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gd928"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.544511 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-85ddbf476-446zt"]
Jan 30 00:12:07 crc kubenswrapper[4885]: E0130 00:12:07.545038 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="738fdbfc-508c-4141-8932-e0d7f94e8e27" containerName="controller-manager"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.545080 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="738fdbfc-508c-4141-8932-e0d7f94e8e27" containerName="controller-manager"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.545286 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="738fdbfc-508c-4141-8932-e0d7f94e8e27" containerName="controller-manager"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.546220 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-85ddbf476-446zt"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.549478 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.549731 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.550182 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.550418 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.550495 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.551919 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.556267 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.562322 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-85ddbf476-446zt"]
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.604944 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gd928"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.654553 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a2c48e3-e437-44e2-a16a-4243f19bb405-config\") pod \"controller-manager-85ddbf476-446zt\" (UID: \"5a2c48e3-e437-44e2-a16a-4243f19bb405\") " pod="openshift-controller-manager/controller-manager-85ddbf476-446zt"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.654914 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5a2c48e3-e437-44e2-a16a-4243f19bb405-client-ca\") pod \"controller-manager-85ddbf476-446zt\" (UID: \"5a2c48e3-e437-44e2-a16a-4243f19bb405\") " pod="openshift-controller-manager/controller-manager-85ddbf476-446zt"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.655094 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kc8bq\" (UniqueName: \"kubernetes.io/projected/5a2c48e3-e437-44e2-a16a-4243f19bb405-kube-api-access-kc8bq\") pod \"controller-manager-85ddbf476-446zt\" (UID: \"5a2c48e3-e437-44e2-a16a-4243f19bb405\") " pod="openshift-controller-manager/controller-manager-85ddbf476-446zt"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.655346 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5a2c48e3-e437-44e2-a16a-4243f19bb405-serving-cert\") pod \"controller-manager-85ddbf476-446zt\" (UID: \"5a2c48e3-e437-44e2-a16a-4243f19bb405\") " pod="openshift-controller-manager/controller-manager-85ddbf476-446zt"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.655475 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5a2c48e3-e437-44e2-a16a-4243f19bb405-proxy-ca-bundles\") pod \"controller-manager-85ddbf476-446zt\" (UID: \"5a2c48e3-e437-44e2-a16a-4243f19bb405\") " pod="openshift-controller-manager/controller-manager-85ddbf476-446zt"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.756462 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5a2c48e3-e437-44e2-a16a-4243f19bb405-client-ca\") pod \"controller-manager-85ddbf476-446zt\" (UID: \"5a2c48e3-e437-44e2-a16a-4243f19bb405\") " pod="openshift-controller-manager/controller-manager-85ddbf476-446zt"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.756522 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kc8bq\" (UniqueName: \"kubernetes.io/projected/5a2c48e3-e437-44e2-a16a-4243f19bb405-kube-api-access-kc8bq\") pod \"controller-manager-85ddbf476-446zt\" (UID: \"5a2c48e3-e437-44e2-a16a-4243f19bb405\") " pod="openshift-controller-manager/controller-manager-85ddbf476-446zt"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.756565 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5a2c48e3-e437-44e2-a16a-4243f19bb405-serving-cert\") pod \"controller-manager-85ddbf476-446zt\" (UID: \"5a2c48e3-e437-44e2-a16a-4243f19bb405\") " pod="openshift-controller-manager/controller-manager-85ddbf476-446zt"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.756591 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5a2c48e3-e437-44e2-a16a-4243f19bb405-proxy-ca-bundles\") pod \"controller-manager-85ddbf476-446zt\" (UID: \"5a2c48e3-e437-44e2-a16a-4243f19bb405\") " pod="openshift-controller-manager/controller-manager-85ddbf476-446zt"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.756672 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a2c48e3-e437-44e2-a16a-4243f19bb405-config\") pod \"controller-manager-85ddbf476-446zt\" (UID: \"5a2c48e3-e437-44e2-a16a-4243f19bb405\") " pod="openshift-controller-manager/controller-manager-85ddbf476-446zt"
Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.757954 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5a2c48e3-e437-44e2-a16a-4243f19bb405-proxy-ca-bundles\") pod \"controller-manager-85ddbf476-446zt\" (UID: \"5a2c48e3-e437-44e2-a16a-4243f19bb405\") " pod="openshift-controller-manager/controller-manager-85ddbf476-446zt"
" pod="openshift-controller-manager/controller-manager-85ddbf476-446zt" Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.758110 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a2c48e3-e437-44e2-a16a-4243f19bb405-config\") pod \"controller-manager-85ddbf476-446zt\" (UID: \"5a2c48e3-e437-44e2-a16a-4243f19bb405\") " pod="openshift-controller-manager/controller-manager-85ddbf476-446zt" Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.759964 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5a2c48e3-e437-44e2-a16a-4243f19bb405-client-ca\") pod \"controller-manager-85ddbf476-446zt\" (UID: \"5a2c48e3-e437-44e2-a16a-4243f19bb405\") " pod="openshift-controller-manager/controller-manager-85ddbf476-446zt" Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.769876 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5a2c48e3-e437-44e2-a16a-4243f19bb405-serving-cert\") pod \"controller-manager-85ddbf476-446zt\" (UID: \"5a2c48e3-e437-44e2-a16a-4243f19bb405\") " pod="openshift-controller-manager/controller-manager-85ddbf476-446zt" Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.793405 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kc8bq\" (UniqueName: \"kubernetes.io/projected/5a2c48e3-e437-44e2-a16a-4243f19bb405-kube-api-access-kc8bq\") pod \"controller-manager-85ddbf476-446zt\" (UID: \"5a2c48e3-e437-44e2-a16a-4243f19bb405\") " pod="openshift-controller-manager/controller-manager-85ddbf476-446zt" Jan 30 00:12:07 crc kubenswrapper[4885]: I0130 00:12:07.871896 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-85ddbf476-446zt" Jan 30 00:12:08 crc kubenswrapper[4885]: I0130 00:12:08.002199 4885 generic.go:334] "Generic (PLEG): container finished" podID="9af08248-ac20-4708-8753-bd2d97ad46a6" containerID="67be97a003880e5218654afd7913d928cf7c0a23891c50a4d8185b3b4bc98fcc" exitCode=0 Jan 30 00:12:08 crc kubenswrapper[4885]: I0130 00:12:08.002281 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bw6zc" event={"ID":"9af08248-ac20-4708-8753-bd2d97ad46a6","Type":"ContainerDied","Data":"67be97a003880e5218654afd7913d928cf7c0a23891c50a4d8185b3b4bc98fcc"} Jan 30 00:12:08 crc kubenswrapper[4885]: I0130 00:12:08.005154 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v5294" event={"ID":"c1ac97f9-b076-40c9-80fc-a2f6111d313b","Type":"ContainerStarted","Data":"1f1d8ad0778c62040ec7c66ddf3d32878278c2d209cca2154148a049a2df4e56"} Jan 30 00:12:08 crc kubenswrapper[4885]: I0130 00:12:08.008836 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mfdx7" event={"ID":"14fd8cd4-0faa-45da-a532-9528073cfe8e","Type":"ContainerStarted","Data":"9b12c4ebdc4666dd464659f1cbf6752d184579024a105cf65e4dd49053949b7a"} Jan 30 00:12:08 crc kubenswrapper[4885]: I0130 00:12:08.012957 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7fh97" event={"ID":"fdd3cba5-cf61-40cd-8c88-d289887fbf8a","Type":"ContainerStarted","Data":"3cdbdd6f61793c9de0250b35733d8beb128471927b6d2996288d415e316391af"} Jan 30 00:12:08 crc kubenswrapper[4885]: I0130 00:12:08.014997 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jmbh5" event={"ID":"dffca359-3f77-47e0-999e-ec7b5d72176d","Type":"ContainerStarted","Data":"9e98b1ac523c55774442e0b0331d86559c525b25f5ac9809138b6ddcaecea561"} Jan 30 00:12:08 crc kubenswrapper[4885]: I0130 00:12:08.017097 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" event={"ID":"82414781-e2d1-4742-9758-b2bdd1dc8061","Type":"ContainerStarted","Data":"298d822e4b848ebdbc43024d85bb439b289910093f8678f6979c206a6fc04eb9"} Jan 30 00:12:08 crc kubenswrapper[4885]: I0130 00:12:08.017116 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" event={"ID":"82414781-e2d1-4742-9758-b2bdd1dc8061","Type":"ContainerStarted","Data":"a0e383bc373242d3ec43807583aa6fa6b42b33be6ae828d077e9c90d23577744"} Jan 30 00:12:08 crc kubenswrapper[4885]: I0130 00:12:08.140268 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-85ddbf476-446zt"] Jan 30 00:12:09 crc kubenswrapper[4885]: I0130 00:12:09.024389 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-85ddbf476-446zt" event={"ID":"5a2c48e3-e437-44e2-a16a-4243f19bb405","Type":"ContainerStarted","Data":"545970e3b548272a9ac5cb312e3b89fd37243b1ee6d6dfdaf96712396ddd5fd2"} Jan 30 00:12:09 crc kubenswrapper[4885]: I0130 00:12:09.024813 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-85ddbf476-446zt" event={"ID":"5a2c48e3-e437-44e2-a16a-4243f19bb405","Type":"ContainerStarted","Data":"c5d30785d4c708f579eaa90a61ebd33cb204c3214f87f50a829639195b06df11"} Jan 30 
00:12:09 crc kubenswrapper[4885]: I0130 00:12:09.027335 4885 generic.go:334] "Generic (PLEG): container finished" podID="c1ac97f9-b076-40c9-80fc-a2f6111d313b" containerID="1f1d8ad0778c62040ec7c66ddf3d32878278c2d209cca2154148a049a2df4e56" exitCode=0 Jan 30 00:12:09 crc kubenswrapper[4885]: I0130 00:12:09.027388 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v5294" event={"ID":"c1ac97f9-b076-40c9-80fc-a2f6111d313b","Type":"ContainerDied","Data":"1f1d8ad0778c62040ec7c66ddf3d32878278c2d209cca2154148a049a2df4e56"} Jan 30 00:12:09 crc kubenswrapper[4885]: I0130 00:12:09.029188 4885 generic.go:334] "Generic (PLEG): container finished" podID="14fd8cd4-0faa-45da-a532-9528073cfe8e" containerID="9b12c4ebdc4666dd464659f1cbf6752d184579024a105cf65e4dd49053949b7a" exitCode=0 Jan 30 00:12:09 crc kubenswrapper[4885]: I0130 00:12:09.029280 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mfdx7" event={"ID":"14fd8cd4-0faa-45da-a532-9528073cfe8e","Type":"ContainerDied","Data":"9b12c4ebdc4666dd464659f1cbf6752d184579024a105cf65e4dd49053949b7a"} Jan 30 00:12:09 crc kubenswrapper[4885]: I0130 00:12:09.079059 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7fh97" podStartSLOduration=6.133629348 podStartE2EDuration="1m5.079034869s" podCreationTimestamp="2026-01-30 00:11:04 +0000 UTC" firstStartedPulling="2026-01-30 00:11:07.897331067 +0000 UTC m=+154.488802815" lastFinishedPulling="2026-01-30 00:12:06.842736588 +0000 UTC m=+213.434208336" observedRunningTime="2026-01-30 00:12:09.075041743 +0000 UTC m=+215.666513511" watchObservedRunningTime="2026-01-30 00:12:09.079034869 +0000 UTC m=+215.670506627" Jan 30 00:12:09 crc kubenswrapper[4885]: I0130 00:12:09.140243 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jmbh5" podStartSLOduration=6.322548538 podStartE2EDuration="1m5.140219174s" podCreationTimestamp="2026-01-30 00:11:04 +0000 UTC" firstStartedPulling="2026-01-30 00:11:07.928375709 +0000 UTC m=+154.519847457" lastFinishedPulling="2026-01-30 00:12:06.746046345 +0000 UTC m=+213.337518093" observedRunningTime="2026-01-30 00:12:09.13334866 +0000 UTC m=+215.724820408" watchObservedRunningTime="2026-01-30 00:12:09.140219174 +0000 UTC m=+215.731690922" Jan 30 00:12:09 crc kubenswrapper[4885]: I0130 00:12:09.161231 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" podStartSLOduration=6.161210634 podStartE2EDuration="6.161210634s" podCreationTimestamp="2026-01-30 00:12:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:12:09.158151332 +0000 UTC m=+215.749623090" watchObservedRunningTime="2026-01-30 00:12:09.161210634 +0000 UTC m=+215.752682382" Jan 30 00:12:10 crc kubenswrapper[4885]: I0130 00:12:10.037160 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-85ddbf476-446zt" Jan 30 00:12:10 crc kubenswrapper[4885]: I0130 00:12:10.043573 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-85ddbf476-446zt" Jan 30 00:12:10 crc kubenswrapper[4885]: I0130 00:12:10.079505 4885 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openshift-controller-manager/controller-manager-85ddbf476-446zt" podStartSLOduration=8.079480899 podStartE2EDuration="8.079480899s" podCreationTimestamp="2026-01-30 00:12:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:12:10.05519957 +0000 UTC m=+216.646671318" watchObservedRunningTime="2026-01-30 00:12:10.079480899 +0000 UTC m=+216.670952647" Jan 30 00:12:12 crc kubenswrapper[4885]: I0130 00:12:12.047798 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n4nxr" event={"ID":"ab0a81e5-1af3-4340-a412-b0ee0d506468","Type":"ContainerStarted","Data":"a500c379dbb74ff2b549563d425b93b3eff73eb00bb7f2e0381cb2fb3c21a28c"} Jan 30 00:12:12 crc kubenswrapper[4885]: I0130 00:12:12.066246 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-n4nxr" podStartSLOduration=2.861206756 podStartE2EDuration="1m6.066225331s" podCreationTimestamp="2026-01-30 00:11:06 +0000 UTC" firstStartedPulling="2026-01-30 00:11:07.97505296 +0000 UTC m=+154.566524708" lastFinishedPulling="2026-01-30 00:12:11.180071525 +0000 UTC m=+217.771543283" observedRunningTime="2026-01-30 00:12:12.065142903 +0000 UTC m=+218.656614651" watchObservedRunningTime="2026-01-30 00:12:12.066225331 +0000 UTC m=+218.657697079" Jan 30 00:12:14 crc kubenswrapper[4885]: I0130 00:12:14.936056 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" Jan 30 00:12:14 crc kubenswrapper[4885]: I0130 00:12:14.946946 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" Jan 30 00:12:15 crc kubenswrapper[4885]: I0130 00:12:15.071808 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v5294" event={"ID":"c1ac97f9-b076-40c9-80fc-a2f6111d313b","Type":"ContainerStarted","Data":"c47d747c5673776e931481c1618a4d1f2dc5eae57c50d9bebdaf59428ed93c6f"} Jan 30 00:12:15 crc kubenswrapper[4885]: I0130 00:12:15.092577 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-v5294" podStartSLOduration=5.595016223 podStartE2EDuration="1m12.092552521s" podCreationTimestamp="2026-01-30 00:11:03 +0000 UTC" firstStartedPulling="2026-01-30 00:11:06.682162774 +0000 UTC m=+153.273634522" lastFinishedPulling="2026-01-30 00:12:13.179699062 +0000 UTC m=+219.771170820" observedRunningTime="2026-01-30 00:12:15.088594655 +0000 UTC m=+221.680066443" watchObservedRunningTime="2026-01-30 00:12:15.092552521 +0000 UTC m=+221.684024279" Jan 30 00:12:15 crc kubenswrapper[4885]: I0130 00:12:15.421082 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7fh97" Jan 30 00:12:15 crc kubenswrapper[4885]: I0130 00:12:15.421444 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7fh97" Jan 30 00:12:15 crc kubenswrapper[4885]: I0130 00:12:15.449148 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jmbh5" Jan 30 00:12:15 crc kubenswrapper[4885]: I0130 00:12:15.449205 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/community-operators-jmbh5" Jan 30 00:12:15 crc kubenswrapper[4885]: I0130 00:12:15.471165 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7fh97" Jan 30 00:12:15 crc kubenswrapper[4885]: I0130 00:12:15.489514 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jmbh5" Jan 30 00:12:16 crc kubenswrapper[4885]: I0130 00:12:16.128957 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jmbh5" Jan 30 00:12:16 crc kubenswrapper[4885]: I0130 00:12:16.152141 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7fh97" Jan 30 00:12:16 crc kubenswrapper[4885]: I0130 00:12:16.379317 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jmbh5"] Jan 30 00:12:16 crc kubenswrapper[4885]: I0130 00:12:16.852654 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-n4nxr" Jan 30 00:12:16 crc kubenswrapper[4885]: I0130 00:12:16.853223 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-n4nxr" Jan 30 00:12:16 crc kubenswrapper[4885]: I0130 00:12:16.924247 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-n4nxr" Jan 30 00:12:17 crc kubenswrapper[4885]: I0130 00:12:17.144152 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-n4nxr" Jan 30 00:12:18 crc kubenswrapper[4885]: I0130 00:12:18.092806 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jmbh5" podUID="dffca359-3f77-47e0-999e-ec7b5d72176d" containerName="registry-server" containerID="cri-o://9e98b1ac523c55774442e0b0331d86559c525b25f5ac9809138b6ddcaecea561" gracePeriod=2 Jan 30 00:12:19 crc kubenswrapper[4885]: I0130 00:12:19.103159 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wng4c" event={"ID":"7f0d9e5f-67f9-4f87-8546-8e12d68513e9","Type":"ContainerStarted","Data":"ef9b2b3136c4247dc8ba7f1e38251bea3ae0b10a25b23c7aeb884460114af0f9"} Jan 30 00:12:19 crc kubenswrapper[4885]: I0130 00:12:19.106054 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bw6zc" event={"ID":"9af08248-ac20-4708-8753-bd2d97ad46a6","Type":"ContainerStarted","Data":"ce3ec2641c6301928f85fd726707acdd68d2bbc20fd995d8fc35ecb9aeb82a41"} Jan 30 00:12:19 crc kubenswrapper[4885]: I0130 00:12:19.108512 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mfdx7" event={"ID":"14fd8cd4-0faa-45da-a532-9528073cfe8e","Type":"ContainerStarted","Data":"f2bfaf33f740563349e88f1a29d1fc1712be88fbbf13ad880c023a1f43fa7560"} Jan 30 00:12:19 crc kubenswrapper[4885]: I0130 00:12:19.111276 4885 generic.go:334] "Generic (PLEG): container finished" podID="dffca359-3f77-47e0-999e-ec7b5d72176d" containerID="9e98b1ac523c55774442e0b0331d86559c525b25f5ac9809138b6ddcaecea561" exitCode=0 Jan 30 00:12:19 crc kubenswrapper[4885]: I0130 00:12:19.111365 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jmbh5" 
event={"ID":"dffca359-3f77-47e0-999e-ec7b5d72176d","Type":"ContainerDied","Data":"9e98b1ac523c55774442e0b0331d86559c525b25f5ac9809138b6ddcaecea561"} Jan 30 00:12:19 crc kubenswrapper[4885]: I0130 00:12:19.111416 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jmbh5" event={"ID":"dffca359-3f77-47e0-999e-ec7b5d72176d","Type":"ContainerDied","Data":"74bf72ee91effaba0bf86d1759e19fa17863ca9196e2e2290cdef90a99ea8d15"} Jan 30 00:12:19 crc kubenswrapper[4885]: I0130 00:12:19.111435 4885 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="74bf72ee91effaba0bf86d1759e19fa17863ca9196e2e2290cdef90a99ea8d15" Jan 30 00:12:19 crc kubenswrapper[4885]: I0130 00:12:19.119876 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jmbh5" Jan 30 00:12:19 crc kubenswrapper[4885]: I0130 00:12:19.133039 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wng4c" podStartSLOduration=3.997844648 podStartE2EDuration="1m12.133016736s" podCreationTimestamp="2026-01-30 00:11:07 +0000 UTC" firstStartedPulling="2026-01-30 00:11:10.1532289 +0000 UTC m=+156.744700638" lastFinishedPulling="2026-01-30 00:12:18.288400978 +0000 UTC m=+224.879872726" observedRunningTime="2026-01-30 00:12:19.131024262 +0000 UTC m=+225.722496010" watchObservedRunningTime="2026-01-30 00:12:19.133016736 +0000 UTC m=+225.724488484" Jan 30 00:12:19 crc kubenswrapper[4885]: I0130 00:12:19.153480 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bw6zc" podStartSLOduration=4.527925042 podStartE2EDuration="1m15.153452091s" podCreationTimestamp="2026-01-30 00:11:04 +0000 UTC" firstStartedPulling="2026-01-30 00:11:06.562115559 +0000 UTC m=+153.153587307" lastFinishedPulling="2026-01-30 00:12:17.187642568 +0000 UTC m=+223.779114356" observedRunningTime="2026-01-30 00:12:19.150631006 +0000 UTC m=+225.742102744" watchObservedRunningTime="2026-01-30 00:12:19.153452091 +0000 UTC m=+225.744923839" Jan 30 00:12:19 crc kubenswrapper[4885]: I0130 00:12:19.168618 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mfdx7" podStartSLOduration=2.722367577 podStartE2EDuration="1m13.168601027s" podCreationTimestamp="2026-01-30 00:11:06 +0000 UTC" firstStartedPulling="2026-01-30 00:11:07.832125361 +0000 UTC m=+154.423597109" lastFinishedPulling="2026-01-30 00:12:18.278358811 +0000 UTC m=+224.869830559" observedRunningTime="2026-01-30 00:12:19.167976819 +0000 UTC m=+225.759448567" watchObservedRunningTime="2026-01-30 00:12:19.168601027 +0000 UTC m=+225.760072765" Jan 30 00:12:19 crc kubenswrapper[4885]: I0130 00:12:19.179414 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n4nxr"] Jan 30 00:12:19 crc kubenswrapper[4885]: I0130 00:12:19.272220 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dffca359-3f77-47e0-999e-ec7b5d72176d-utilities\") pod \"dffca359-3f77-47e0-999e-ec7b5d72176d\" (UID: \"dffca359-3f77-47e0-999e-ec7b5d72176d\") " Jan 30 00:12:19 crc kubenswrapper[4885]: I0130 00:12:19.272290 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7rn9\" (UniqueName: 
\"kubernetes.io/projected/dffca359-3f77-47e0-999e-ec7b5d72176d-kube-api-access-q7rn9\") pod \"dffca359-3f77-47e0-999e-ec7b5d72176d\" (UID: \"dffca359-3f77-47e0-999e-ec7b5d72176d\") " Jan 30 00:12:19 crc kubenswrapper[4885]: I0130 00:12:19.272515 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dffca359-3f77-47e0-999e-ec7b5d72176d-catalog-content\") pod \"dffca359-3f77-47e0-999e-ec7b5d72176d\" (UID: \"dffca359-3f77-47e0-999e-ec7b5d72176d\") " Jan 30 00:12:19 crc kubenswrapper[4885]: I0130 00:12:19.276873 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dffca359-3f77-47e0-999e-ec7b5d72176d-utilities" (OuterVolumeSpecName: "utilities") pod "dffca359-3f77-47e0-999e-ec7b5d72176d" (UID: "dffca359-3f77-47e0-999e-ec7b5d72176d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:12:19 crc kubenswrapper[4885]: I0130 00:12:19.293106 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dffca359-3f77-47e0-999e-ec7b5d72176d-kube-api-access-q7rn9" (OuterVolumeSpecName: "kube-api-access-q7rn9") pod "dffca359-3f77-47e0-999e-ec7b5d72176d" (UID: "dffca359-3f77-47e0-999e-ec7b5d72176d"). InnerVolumeSpecName "kube-api-access-q7rn9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:12:19 crc kubenswrapper[4885]: I0130 00:12:19.330047 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dffca359-3f77-47e0-999e-ec7b5d72176d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dffca359-3f77-47e0-999e-ec7b5d72176d" (UID: "dffca359-3f77-47e0-999e-ec7b5d72176d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:12:19 crc kubenswrapper[4885]: I0130 00:12:19.374552 4885 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dffca359-3f77-47e0-999e-ec7b5d72176d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:19 crc kubenswrapper[4885]: I0130 00:12:19.374620 4885 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dffca359-3f77-47e0-999e-ec7b5d72176d-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:19 crc kubenswrapper[4885]: I0130 00:12:19.374635 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7rn9\" (UniqueName: \"kubernetes.io/projected/dffca359-3f77-47e0-999e-ec7b5d72176d-kube-api-access-q7rn9\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:20 crc kubenswrapper[4885]: I0130 00:12:20.117187 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jmbh5" Jan 30 00:12:20 crc kubenswrapper[4885]: I0130 00:12:20.117577 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-n4nxr" podUID="ab0a81e5-1af3-4340-a412-b0ee0d506468" containerName="registry-server" containerID="cri-o://a500c379dbb74ff2b549563d425b93b3eff73eb00bb7f2e0381cb2fb3c21a28c" gracePeriod=2 Jan 30 00:12:20 crc kubenswrapper[4885]: I0130 00:12:20.154966 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jmbh5"] Jan 30 00:12:20 crc kubenswrapper[4885]: I0130 00:12:20.160092 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jmbh5"] Jan 30 00:12:22 crc kubenswrapper[4885]: I0130 00:12:22.150930 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dffca359-3f77-47e0-999e-ec7b5d72176d" path="/var/lib/kubelet/pods/dffca359-3f77-47e0-999e-ec7b5d72176d/volumes" Jan 30 00:12:22 crc kubenswrapper[4885]: I0130 00:12:22.899740 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-85ddbf476-446zt"] Jan 30 00:12:22 crc kubenswrapper[4885]: I0130 00:12:22.900067 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-85ddbf476-446zt" podUID="5a2c48e3-e437-44e2-a16a-4243f19bb405" containerName="controller-manager" containerID="cri-o://545970e3b548272a9ac5cb312e3b89fd37243b1ee6d6dfdaf96712396ddd5fd2" gracePeriod=30 Jan 30 00:12:22 crc kubenswrapper[4885]: I0130 00:12:22.994185 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w"] Jan 30 00:12:22 crc kubenswrapper[4885]: I0130 00:12:22.994434 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" podUID="82414781-e2d1-4742-9758-b2bdd1dc8061" containerName="route-controller-manager" containerID="cri-o://298d822e4b848ebdbc43024d85bb439b289910093f8678f6979c206a6fc04eb9" gracePeriod=30 Jan 30 00:12:23 crc kubenswrapper[4885]: I0130 00:12:23.139600 4885 generic.go:334] "Generic (PLEG): container finished" podID="ab0a81e5-1af3-4340-a412-b0ee0d506468" containerID="a500c379dbb74ff2b549563d425b93b3eff73eb00bb7f2e0381cb2fb3c21a28c" exitCode=0 Jan 30 00:12:23 crc kubenswrapper[4885]: I0130 00:12:23.139670 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n4nxr" event={"ID":"ab0a81e5-1af3-4340-a412-b0ee0d506468","Type":"ContainerDied","Data":"a500c379dbb74ff2b549563d425b93b3eff73eb00bb7f2e0381cb2fb3c21a28c"} Jan 30 00:12:23 crc kubenswrapper[4885]: I0130 00:12:23.594182 4885 util.go:48] "No ready sandbox for pod can be found. 
Jan 30 00:12:23 crc kubenswrapper[4885]: I0130 00:12:23.779641 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4jxqt\" (UniqueName: \"kubernetes.io/projected/ab0a81e5-1af3-4340-a412-b0ee0d506468-kube-api-access-4jxqt\") pod \"ab0a81e5-1af3-4340-a412-b0ee0d506468\" (UID: \"ab0a81e5-1af3-4340-a412-b0ee0d506468\") "
Jan 30 00:12:23 crc kubenswrapper[4885]: I0130 00:12:23.780330 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab0a81e5-1af3-4340-a412-b0ee0d506468-catalog-content\") pod \"ab0a81e5-1af3-4340-a412-b0ee0d506468\" (UID: \"ab0a81e5-1af3-4340-a412-b0ee0d506468\") "
Jan 30 00:12:23 crc kubenswrapper[4885]: I0130 00:12:23.780417 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab0a81e5-1af3-4340-a412-b0ee0d506468-utilities\") pod \"ab0a81e5-1af3-4340-a412-b0ee0d506468\" (UID: \"ab0a81e5-1af3-4340-a412-b0ee0d506468\") "
Jan 30 00:12:23 crc kubenswrapper[4885]: I0130 00:12:23.781259 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab0a81e5-1af3-4340-a412-b0ee0d506468-utilities" (OuterVolumeSpecName: "utilities") pod "ab0a81e5-1af3-4340-a412-b0ee0d506468" (UID: "ab0a81e5-1af3-4340-a412-b0ee0d506468"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 00:12:23 crc kubenswrapper[4885]: I0130 00:12:23.786649 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab0a81e5-1af3-4340-a412-b0ee0d506468-kube-api-access-4jxqt" (OuterVolumeSpecName: "kube-api-access-4jxqt") pod "ab0a81e5-1af3-4340-a412-b0ee0d506468" (UID: "ab0a81e5-1af3-4340-a412-b0ee0d506468"). InnerVolumeSpecName "kube-api-access-4jxqt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:12:23 crc kubenswrapper[4885]: I0130 00:12:23.803400 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab0a81e5-1af3-4340-a412-b0ee0d506468-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ab0a81e5-1af3-4340-a412-b0ee0d506468" (UID: "ab0a81e5-1af3-4340-a412-b0ee0d506468"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 00:12:23 crc kubenswrapper[4885]: I0130 00:12:23.881999 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4jxqt\" (UniqueName: \"kubernetes.io/projected/ab0a81e5-1af3-4340-a412-b0ee0d506468-kube-api-access-4jxqt\") on node \"crc\" DevicePath \"\""
Jan 30 00:12:23 crc kubenswrapper[4885]: I0130 00:12:23.882059 4885 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab0a81e5-1af3-4340-a412-b0ee0d506468-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 30 00:12:23 crc kubenswrapper[4885]: I0130 00:12:23.882081 4885 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab0a81e5-1af3-4340-a412-b0ee0d506468-utilities\") on node \"crc\" DevicePath \"\""
Jan 30 00:12:24 crc kubenswrapper[4885]: I0130 00:12:24.152737 4885 generic.go:334] "Generic (PLEG): container finished" podID="82414781-e2d1-4742-9758-b2bdd1dc8061" containerID="298d822e4b848ebdbc43024d85bb439b289910093f8678f6979c206a6fc04eb9" exitCode=0
Jan 30 00:12:24 crc kubenswrapper[4885]: I0130 00:12:24.158023 4885 generic.go:334] "Generic (PLEG): container finished" podID="5a2c48e3-e437-44e2-a16a-4243f19bb405" containerID="545970e3b548272a9ac5cb312e3b89fd37243b1ee6d6dfdaf96712396ddd5fd2" exitCode=0
Jan 30 00:12:24 crc kubenswrapper[4885]: I0130 00:12:24.160795 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n4nxr"
Jan 30 00:12:24 crc kubenswrapper[4885]: I0130 00:12:24.154025 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" event={"ID":"82414781-e2d1-4742-9758-b2bdd1dc8061","Type":"ContainerDied","Data":"298d822e4b848ebdbc43024d85bb439b289910093f8678f6979c206a6fc04eb9"}
Jan 30 00:12:24 crc kubenswrapper[4885]: I0130 00:12:24.163689 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-85ddbf476-446zt" event={"ID":"5a2c48e3-e437-44e2-a16a-4243f19bb405","Type":"ContainerDied","Data":"545970e3b548272a9ac5cb312e3b89fd37243b1ee6d6dfdaf96712396ddd5fd2"}
Jan 30 00:12:24 crc kubenswrapper[4885]: I0130 00:12:24.164079 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n4nxr" event={"ID":"ab0a81e5-1af3-4340-a412-b0ee0d506468","Type":"ContainerDied","Data":"0999419e1a187a61c43efb33e8aa34490d9ba965d47fe7cd4ac75a913272ba86"}
Jan 30 00:12:24 crc kubenswrapper[4885]: I0130 00:12:24.164219 4885 scope.go:117] "RemoveContainer" containerID="a500c379dbb74ff2b549563d425b93b3eff73eb00bb7f2e0381cb2fb3c21a28c"
Jan 30 00:12:24 crc kubenswrapper[4885]: I0130 00:12:24.204470 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n4nxr"]
Jan 30 00:12:24 crc kubenswrapper[4885]: I0130 00:12:24.209138 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-n4nxr"]
Jan 30 00:12:24 crc kubenswrapper[4885]: I0130 00:12:24.238164 4885 scope.go:117] "RemoveContainer" containerID="141e08e7d89ac8e919208c1c7db55ee46fe0f9a6d855c1ed1488159b4e63ecb5"
Jan 30 00:12:24 crc kubenswrapper[4885]: I0130 00:12:24.270406 4885 scope.go:117] "RemoveContainer" containerID="17dd55955db1d932df42861b7ee36dfdbbe7f323bae8a5deb40222b54ce7eb90"
Jan 30 00:12:24 crc kubenswrapper[4885]: I0130 00:12:24.307157 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-v5294"
Jan 30 00:12:24 crc kubenswrapper[4885]: I0130 00:12:24.307396 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-v5294"
Jan 30 00:12:24 crc kubenswrapper[4885]: I0130 00:12:24.352208 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-v5294"
Jan 30 00:12:24 crc kubenswrapper[4885]: I0130 00:12:24.636346 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bw6zc"
Jan 30 00:12:24 crc kubenswrapper[4885]: I0130 00:12:24.636433 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bw6zc"
Jan 30 00:12:24 crc kubenswrapper[4885]: I0130 00:12:24.679120 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bw6zc"
Jan 30 00:12:24 crc kubenswrapper[4885]: I0130 00:12:24.936842 4885 patch_prober.go:28] interesting pod/route-controller-manager-7f6f76dccb-kt84w container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.62:8443/healthz\": dial tcp 10.217.0.62:8443: connect: connection refused" start-of-body=
Jan 30 00:12:24 crc kubenswrapper[4885]: I0130 00:12:24.937451 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" podUID="82414781-e2d1-4742-9758-b2bdd1dc8061" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.62:8443/healthz\": dial tcp 10.217.0.62:8443: connect: connection refused"
Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.238241 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-v5294"
Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.251691 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bw6zc"
Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.290402 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w"
Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.312158 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82414781-e2d1-4742-9758-b2bdd1dc8061-config\") pod \"82414781-e2d1-4742-9758-b2bdd1dc8061\" (UID: \"82414781-e2d1-4742-9758-b2bdd1dc8061\") "
Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.312289 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82414781-e2d1-4742-9758-b2bdd1dc8061-serving-cert\") pod \"82414781-e2d1-4742-9758-b2bdd1dc8061\" (UID: \"82414781-e2d1-4742-9758-b2bdd1dc8061\") "
Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.312329 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bfp4d\" (UniqueName: \"kubernetes.io/projected/82414781-e2d1-4742-9758-b2bdd1dc8061-kube-api-access-bfp4d\") pod \"82414781-e2d1-4742-9758-b2bdd1dc8061\" (UID: \"82414781-e2d1-4742-9758-b2bdd1dc8061\") "
Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.312424 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/82414781-e2d1-4742-9758-b2bdd1dc8061-client-ca\") pod \"82414781-e2d1-4742-9758-b2bdd1dc8061\" (UID: \"82414781-e2d1-4742-9758-b2bdd1dc8061\") "
Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.313630 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82414781-e2d1-4742-9758-b2bdd1dc8061-client-ca" (OuterVolumeSpecName: "client-ca") pod "82414781-e2d1-4742-9758-b2bdd1dc8061" (UID: "82414781-e2d1-4742-9758-b2bdd1dc8061"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.314468 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82414781-e2d1-4742-9758-b2bdd1dc8061-config" (OuterVolumeSpecName: "config") pod "82414781-e2d1-4742-9758-b2bdd1dc8061" (UID: "82414781-e2d1-4742-9758-b2bdd1dc8061"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.318392 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82414781-e2d1-4742-9758-b2bdd1dc8061-kube-api-access-bfp4d" (OuterVolumeSpecName: "kube-api-access-bfp4d") pod "82414781-e2d1-4742-9758-b2bdd1dc8061" (UID: "82414781-e2d1-4742-9758-b2bdd1dc8061"). InnerVolumeSpecName "kube-api-access-bfp4d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.319897 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82414781-e2d1-4742-9758-b2bdd1dc8061-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "82414781-e2d1-4742-9758-b2bdd1dc8061" (UID: "82414781-e2d1-4742-9758-b2bdd1dc8061"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.413941 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82414781-e2d1-4742-9758-b2bdd1dc8061-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.413990 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bfp4d\" (UniqueName: \"kubernetes.io/projected/82414781-e2d1-4742-9758-b2bdd1dc8061-kube-api-access-bfp4d\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.414009 4885 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/82414781-e2d1-4742-9758-b2bdd1dc8061-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.414024 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82414781-e2d1-4742-9758-b2bdd1dc8061-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.419481 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-85ddbf476-446zt" Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.514964 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5a2c48e3-e437-44e2-a16a-4243f19bb405-serving-cert\") pod \"5a2c48e3-e437-44e2-a16a-4243f19bb405\" (UID: \"5a2c48e3-e437-44e2-a16a-4243f19bb405\") " Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.515015 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kc8bq\" (UniqueName: \"kubernetes.io/projected/5a2c48e3-e437-44e2-a16a-4243f19bb405-kube-api-access-kc8bq\") pod \"5a2c48e3-e437-44e2-a16a-4243f19bb405\" (UID: \"5a2c48e3-e437-44e2-a16a-4243f19bb405\") " Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.515049 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a2c48e3-e437-44e2-a16a-4243f19bb405-config\") pod \"5a2c48e3-e437-44e2-a16a-4243f19bb405\" (UID: \"5a2c48e3-e437-44e2-a16a-4243f19bb405\") " Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.515099 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5a2c48e3-e437-44e2-a16a-4243f19bb405-proxy-ca-bundles\") pod \"5a2c48e3-e437-44e2-a16a-4243f19bb405\" (UID: \"5a2c48e3-e437-44e2-a16a-4243f19bb405\") " Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.515146 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5a2c48e3-e437-44e2-a16a-4243f19bb405-client-ca\") pod \"5a2c48e3-e437-44e2-a16a-4243f19bb405\" (UID: \"5a2c48e3-e437-44e2-a16a-4243f19bb405\") " Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.516206 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a2c48e3-e437-44e2-a16a-4243f19bb405-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "5a2c48e3-e437-44e2-a16a-4243f19bb405" (UID: "5a2c48e3-e437-44e2-a16a-4243f19bb405"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.516213 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a2c48e3-e437-44e2-a16a-4243f19bb405-client-ca" (OuterVolumeSpecName: "client-ca") pod "5a2c48e3-e437-44e2-a16a-4243f19bb405" (UID: "5a2c48e3-e437-44e2-a16a-4243f19bb405"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.516885 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a2c48e3-e437-44e2-a16a-4243f19bb405-config" (OuterVolumeSpecName: "config") pod "5a2c48e3-e437-44e2-a16a-4243f19bb405" (UID: "5a2c48e3-e437-44e2-a16a-4243f19bb405"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.518261 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a2c48e3-e437-44e2-a16a-4243f19bb405-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5a2c48e3-e437-44e2-a16a-4243f19bb405" (UID: "5a2c48e3-e437-44e2-a16a-4243f19bb405"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.519239 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a2c48e3-e437-44e2-a16a-4243f19bb405-kube-api-access-kc8bq" (OuterVolumeSpecName: "kube-api-access-kc8bq") pod "5a2c48e3-e437-44e2-a16a-4243f19bb405" (UID: "5a2c48e3-e437-44e2-a16a-4243f19bb405"). InnerVolumeSpecName "kube-api-access-kc8bq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.616979 4885 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5a2c48e3-e437-44e2-a16a-4243f19bb405-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.617053 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kc8bq\" (UniqueName: \"kubernetes.io/projected/5a2c48e3-e437-44e2-a16a-4243f19bb405-kube-api-access-kc8bq\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.617076 4885 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a2c48e3-e437-44e2-a16a-4243f19bb405-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.617086 4885 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5a2c48e3-e437-44e2-a16a-4243f19bb405-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:25 crc kubenswrapper[4885]: I0130 00:12:25.617099 4885 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5a2c48e3-e437-44e2-a16a-4243f19bb405-client-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.152869 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab0a81e5-1af3-4340-a412-b0ee0d506468" path="/var/lib/kubelet/pods/ab0a81e5-1af3-4340-a412-b0ee0d506468/volumes" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.184036 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" event={"ID":"82414781-e2d1-4742-9758-b2bdd1dc8061","Type":"ContainerDied","Data":"a0e383bc373242d3ec43807583aa6fa6b42b33be6ae828d077e9c90d23577744"} Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.184131 4885 scope.go:117] "RemoveContainer" containerID="298d822e4b848ebdbc43024d85bb439b289910093f8678f6979c206a6fc04eb9" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.184056 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.186492 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-85ddbf476-446zt" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.186795 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-85ddbf476-446zt" event={"ID":"5a2c48e3-e437-44e2-a16a-4243f19bb405","Type":"ContainerDied","Data":"c5d30785d4c708f579eaa90a61ebd33cb204c3214f87f50a829639195b06df11"} Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.212464 4885 scope.go:117] "RemoveContainer" containerID="545970e3b548272a9ac5cb312e3b89fd37243b1ee6d6dfdaf96712396ddd5fd2" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.212558 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-85ddbf476-446zt"] Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.224076 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-85ddbf476-446zt"] Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.229206 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w"] Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.231949 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f6f76dccb-kt84w"] Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.504012 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mfdx7" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.505211 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mfdx7" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.543447 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mfdx7" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.551695 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm"] Jan 30 00:12:26 crc kubenswrapper[4885]: E0130 00:12:26.552089 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dffca359-3f77-47e0-999e-ec7b5d72176d" containerName="extract-content" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.552111 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="dffca359-3f77-47e0-999e-ec7b5d72176d" containerName="extract-content" Jan 30 00:12:26 crc kubenswrapper[4885]: E0130 00:12:26.552121 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dffca359-3f77-47e0-999e-ec7b5d72176d" containerName="registry-server" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 
Jan 30 00:12:26 crc kubenswrapper[4885]: E0130 00:12:26.552138 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab0a81e5-1af3-4340-a412-b0ee0d506468" containerName="extract-content"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.552164 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab0a81e5-1af3-4340-a412-b0ee0d506468" containerName="extract-content"
Jan 30 00:12:26 crc kubenswrapper[4885]: E0130 00:12:26.552177 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dffca359-3f77-47e0-999e-ec7b5d72176d" containerName="extract-utilities"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.552183 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="dffca359-3f77-47e0-999e-ec7b5d72176d" containerName="extract-utilities"
Jan 30 00:12:26 crc kubenswrapper[4885]: E0130 00:12:26.552189 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab0a81e5-1af3-4340-a412-b0ee0d506468" containerName="registry-server"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.552195 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab0a81e5-1af3-4340-a412-b0ee0d506468" containerName="registry-server"
Jan 30 00:12:26 crc kubenswrapper[4885]: E0130 00:12:26.552247 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab0a81e5-1af3-4340-a412-b0ee0d506468" containerName="extract-utilities"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.552254 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab0a81e5-1af3-4340-a412-b0ee0d506468" containerName="extract-utilities"
Jan 30 00:12:26 crc kubenswrapper[4885]: E0130 00:12:26.552261 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82414781-e2d1-4742-9758-b2bdd1dc8061" containerName="route-controller-manager"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.552267 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="82414781-e2d1-4742-9758-b2bdd1dc8061" containerName="route-controller-manager"
Jan 30 00:12:26 crc kubenswrapper[4885]: E0130 00:12:26.552280 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a2c48e3-e437-44e2-a16a-4243f19bb405" containerName="controller-manager"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.552286 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a2c48e3-e437-44e2-a16a-4243f19bb405" containerName="controller-manager"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.552416 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="dffca359-3f77-47e0-999e-ec7b5d72176d" containerName="registry-server"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.552431 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="82414781-e2d1-4742-9758-b2bdd1dc8061" containerName="route-controller-manager"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.552443 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a2c48e3-e437-44e2-a16a-4243f19bb405" containerName="controller-manager"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.552451 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab0a81e5-1af3-4340-a412-b0ee0d506468" containerName="registry-server"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.552998 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.555204 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.555941 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.559361 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.559903 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.559954 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.562216 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.565336 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.566786 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm"]
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.631812 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/da039051-d3c0-4064-8254-6eaba29df134-proxy-ca-bundles\") pod \"controller-manager-6fcdd7f94c-b77tm\" (UID: \"da039051-d3c0-4064-8254-6eaba29df134\") " pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.631906 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crssj\" (UniqueName: \"kubernetes.io/projected/da039051-d3c0-4064-8254-6eaba29df134-kube-api-access-crssj\") pod \"controller-manager-6fcdd7f94c-b77tm\" (UID: \"da039051-d3c0-4064-8254-6eaba29df134\") " pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.631943 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/da039051-d3c0-4064-8254-6eaba29df134-client-ca\") pod \"controller-manager-6fcdd7f94c-b77tm\" (UID: \"da039051-d3c0-4064-8254-6eaba29df134\") " pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.632055 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/da039051-d3c0-4064-8254-6eaba29df134-serving-cert\") pod \"controller-manager-6fcdd7f94c-b77tm\" (UID: \"da039051-d3c0-4064-8254-6eaba29df134\") " pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.632154 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da039051-d3c0-4064-8254-6eaba29df134-config\") pod \"controller-manager-6fcdd7f94c-b77tm\" (UID: \"da039051-d3c0-4064-8254-6eaba29df134\") " pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.733064 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/da039051-d3c0-4064-8254-6eaba29df134-proxy-ca-bundles\") pod \"controller-manager-6fcdd7f94c-b77tm\" (UID: \"da039051-d3c0-4064-8254-6eaba29df134\") " pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.733139 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crssj\" (UniqueName: \"kubernetes.io/projected/da039051-d3c0-4064-8254-6eaba29df134-kube-api-access-crssj\") pod \"controller-manager-6fcdd7f94c-b77tm\" (UID: \"da039051-d3c0-4064-8254-6eaba29df134\") " pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.733186 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/da039051-d3c0-4064-8254-6eaba29df134-client-ca\") pod \"controller-manager-6fcdd7f94c-b77tm\" (UID: \"da039051-d3c0-4064-8254-6eaba29df134\") " pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.733258 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/da039051-d3c0-4064-8254-6eaba29df134-serving-cert\") pod \"controller-manager-6fcdd7f94c-b77tm\" (UID: \"da039051-d3c0-4064-8254-6eaba29df134\") " pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.733360 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da039051-d3c0-4064-8254-6eaba29df134-config\") pod \"controller-manager-6fcdd7f94c-b77tm\" (UID: \"da039051-d3c0-4064-8254-6eaba29df134\") " pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.734546 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/da039051-d3c0-4064-8254-6eaba29df134-proxy-ca-bundles\") pod \"controller-manager-6fcdd7f94c-b77tm\" (UID: \"da039051-d3c0-4064-8254-6eaba29df134\") " pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.737005 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da039051-d3c0-4064-8254-6eaba29df134-config\") pod \"controller-manager-6fcdd7f94c-b77tm\" (UID: \"da039051-d3c0-4064-8254-6eaba29df134\") " pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.738283 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/da039051-d3c0-4064-8254-6eaba29df134-client-ca\") pod \"controller-manager-6fcdd7f94c-b77tm\" (UID: \"da039051-d3c0-4064-8254-6eaba29df134\") " pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm"
Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.743836 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/da039051-d3c0-4064-8254-6eaba29df134-serving-cert\") pod \"controller-manager-6fcdd7f94c-b77tm\" (UID: \"da039051-d3c0-4064-8254-6eaba29df134\") " pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.763072 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crssj\" (UniqueName: \"kubernetes.io/projected/da039051-d3c0-4064-8254-6eaba29df134-kube-api-access-crssj\") pod \"controller-manager-6fcdd7f94c-b77tm\" (UID: \"da039051-d3c0-4064-8254-6eaba29df134\") " pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.875017 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm" Jan 30 00:12:26 crc kubenswrapper[4885]: E0130 00:12:26.886004 4885 file.go:109] "Unable to process watch event" err="can't process config file \"/etc/kubernetes/manifests/kube-apiserver-startup-monitor-pod.yaml\": /etc/kubernetes/manifests/kube-apiserver-startup-monitor-pod.yaml: couldn't parse as pod(Object 'Kind' is missing in 'null'), please check config file" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.888739 4885 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.889724 4885 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.890087 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879" gracePeriod=15 Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.890284 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.890729 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a" gracePeriod=15 Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.890823 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca" gracePeriod=15 Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.890880 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360" gracePeriod=15 Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.891029 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4" gracePeriod=15 Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.891750 4885 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 30 00:12:26 crc kubenswrapper[4885]: E0130 00:12:26.892231 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.892250 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 30 00:12:26 crc kubenswrapper[4885]: E0130 00:12:26.892266 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.892272 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 30 00:12:26 crc kubenswrapper[4885]: E0130 00:12:26.892278 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.892284 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 30 00:12:26 crc kubenswrapper[4885]: E0130 00:12:26.892296 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.892304 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 30 00:12:26 crc kubenswrapper[4885]: E0130 00:12:26.892317 4885 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.892323 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 30 00:12:26 crc kubenswrapper[4885]: E0130 00:12:26.892332 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.892337 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 30 00:12:26 crc kubenswrapper[4885]: E0130 00:12:26.892344 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.892350 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.892489 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.892509 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.892524 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.892536 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.892544 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.892551 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.942666 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.942748 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.943070 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.943191 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.943261 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.943289 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.943341 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 00:12:26 crc kubenswrapper[4885]: I0130 00:12:26.943596 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 00:12:26 crc kubenswrapper[4885]: E0130 00:12:26.981586 4885 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.213:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.045395 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.045494 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.045539 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.045538 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.045625 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.045569 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.045691 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.045762 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.045824 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.045851 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.045920 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.045585 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 
00:12:27.045966 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.045553 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.046015 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.046151 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.199929 4885 generic.go:334] "Generic (PLEG): container finished" podID="d94abf5b-36a7-4e56-9f8f-1bbc4f729676" containerID="7a9aaf1703b91aa981ba91143618ed1dd0fd8185d2c02d1969154a797fb64b54" exitCode=0 Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.200010 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"d94abf5b-36a7-4e56-9f8f-1bbc4f729676","Type":"ContainerDied","Data":"7a9aaf1703b91aa981ba91143618ed1dd0fd8185d2c02d1969154a797fb64b54"} Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.200781 4885 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.201004 4885 status_manager.go:851] "Failed to get status for pod" podUID="d94abf5b-36a7-4e56-9f8f-1bbc4f729676" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.202441 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.204038 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.204862 4885 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" 
containerID="10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a" exitCode=0 Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.204900 4885 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca" exitCode=0 Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.204916 4885 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360" exitCode=0 Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.204929 4885 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4" exitCode=2 Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.205033 4885 scope.go:117] "RemoveContainer" containerID="b46f856d6048b9e7b48fb6b82a2e069e5f7653bfee52145511886b4f42ffac6e" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.256341 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mfdx7" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.258312 4885 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.259099 4885 status_manager.go:851] "Failed to get status for pod" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" pod="openshift-marketplace/redhat-marketplace-mfdx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mfdx7\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.259797 4885 status_manager.go:851] "Failed to get status for pod" podUID="d94abf5b-36a7-4e56-9f8f-1bbc4f729676" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.282485 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 00:12:27 crc kubenswrapper[4885]: E0130 00:12:27.312462 4885 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.213:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188f59d46330343c openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-30 00:12:27.311936572 +0000 UTC m=+233.903408320,LastTimestamp:2026-01-30 00:12:27.311936572 +0000 UTC m=+233.903408320,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 30 00:12:27 crc kubenswrapper[4885]: E0130 00:12:27.475303 4885 log.go:32] "RunPodSandbox from runtime service failed" err=< Jan 30 00:12:27 crc kubenswrapper[4885]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_controller-manager-6fcdd7f94c-b77tm_openshift-controller-manager_da039051-d3c0-4064-8254-6eaba29df134_0(60c6f107bcb8d1866d0b71b11cdfb2370f2a71ef9e42572c96f70f78fc7d0a80): error adding pod openshift-controller-manager_controller-manager-6fcdd7f94c-b77tm to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"60c6f107bcb8d1866d0b71b11cdfb2370f2a71ef9e42572c96f70f78fc7d0a80" Netns:"/var/run/netns/f7ea7989-e75b-48c7-b5ef-9c92fa5f482c" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-6fcdd7f94c-b77tm;K8S_POD_INFRA_CONTAINER_ID=60c6f107bcb8d1866d0b71b11cdfb2370f2a71ef9e42572c96f70f78fc7d0a80;K8S_POD_UID=da039051-d3c0-4064-8254-6eaba29df134" Path:"" ERRORED: error configuring pod [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm] networking: Multus: [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm/da039051-d3c0-4064-8254-6eaba29df134]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: SetNetworkStatus: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-6fcdd7f94c-b77tm?timeout=1m0s": dial tcp 38.102.83.213:6443: connect: connection refused Jan 30 00:12:27 crc kubenswrapper[4885]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Jan 30 00:12:27 crc kubenswrapper[4885]: > Jan 30 
00:12:27 crc kubenswrapper[4885]: E0130 00:12:27.475689 4885 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Jan 30 00:12:27 crc kubenswrapper[4885]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_controller-manager-6fcdd7f94c-b77tm_openshift-controller-manager_da039051-d3c0-4064-8254-6eaba29df134_0(60c6f107bcb8d1866d0b71b11cdfb2370f2a71ef9e42572c96f70f78fc7d0a80): error adding pod openshift-controller-manager_controller-manager-6fcdd7f94c-b77tm to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"60c6f107bcb8d1866d0b71b11cdfb2370f2a71ef9e42572c96f70f78fc7d0a80" Netns:"/var/run/netns/f7ea7989-e75b-48c7-b5ef-9c92fa5f482c" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-6fcdd7f94c-b77tm;K8S_POD_INFRA_CONTAINER_ID=60c6f107bcb8d1866d0b71b11cdfb2370f2a71ef9e42572c96f70f78fc7d0a80;K8S_POD_UID=da039051-d3c0-4064-8254-6eaba29df134" Path:"" ERRORED: error configuring pod [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm] networking: Multus: [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm/da039051-d3c0-4064-8254-6eaba29df134]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: SetNetworkStatus: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-6fcdd7f94c-b77tm?timeout=1m0s": dial tcp 38.102.83.213:6443: connect: connection refused Jan 30 00:12:27 crc kubenswrapper[4885]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Jan 30 00:12:27 crc kubenswrapper[4885]: > pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm" Jan 30 00:12:27 crc kubenswrapper[4885]: E0130 00:12:27.475709 4885 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Jan 30 00:12:27 crc kubenswrapper[4885]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_controller-manager-6fcdd7f94c-b77tm_openshift-controller-manager_da039051-d3c0-4064-8254-6eaba29df134_0(60c6f107bcb8d1866d0b71b11cdfb2370f2a71ef9e42572c96f70f78fc7d0a80): error adding pod openshift-controller-manager_controller-manager-6fcdd7f94c-b77tm to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"60c6f107bcb8d1866d0b71b11cdfb2370f2a71ef9e42572c96f70f78fc7d0a80" Netns:"/var/run/netns/f7ea7989-e75b-48c7-b5ef-9c92fa5f482c" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-6fcdd7f94c-b77tm;K8S_POD_INFRA_CONTAINER_ID=60c6f107bcb8d1866d0b71b11cdfb2370f2a71ef9e42572c96f70f78fc7d0a80;K8S_POD_UID=da039051-d3c0-4064-8254-6eaba29df134" Path:"" ERRORED: error configuring pod [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm] networking: Multus: 
[openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm/da039051-d3c0-4064-8254-6eaba29df134]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: SetNetworkStatus: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-6fcdd7f94c-b77tm?timeout=1m0s": dial tcp 38.102.83.213:6443: connect: connection refused Jan 30 00:12:27 crc kubenswrapper[4885]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Jan 30 00:12:27 crc kubenswrapper[4885]: > pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm" Jan 30 00:12:27 crc kubenswrapper[4885]: E0130 00:12:27.475831 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"controller-manager-6fcdd7f94c-b77tm_openshift-controller-manager(da039051-d3c0-4064-8254-6eaba29df134)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"controller-manager-6fcdd7f94c-b77tm_openshift-controller-manager(da039051-d3c0-4064-8254-6eaba29df134)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_controller-manager-6fcdd7f94c-b77tm_openshift-controller-manager_da039051-d3c0-4064-8254-6eaba29df134_0(60c6f107bcb8d1866d0b71b11cdfb2370f2a71ef9e42572c96f70f78fc7d0a80): error adding pod openshift-controller-manager_controller-manager-6fcdd7f94c-b77tm to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"60c6f107bcb8d1866d0b71b11cdfb2370f2a71ef9e42572c96f70f78fc7d0a80\\\" Netns:\\\"/var/run/netns/f7ea7989-e75b-48c7-b5ef-9c92fa5f482c\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-6fcdd7f94c-b77tm;K8S_POD_INFRA_CONTAINER_ID=60c6f107bcb8d1866d0b71b11cdfb2370f2a71ef9e42572c96f70f78fc7d0a80;K8S_POD_UID=da039051-d3c0-4064-8254-6eaba29df134\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm] networking: Multus: [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm/da039051-d3c0-4064-8254-6eaba29df134]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: SetNetworkStatus: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: status update failed for pod /: Get \\\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-6fcdd7f94c-b77tm?timeout=1m0s\\\": dial tcp 38.102.83.213:6443: connect: connection refused\\n': StdinData: 
{\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm" podUID="da039051-d3c0-4064-8254-6eaba29df134" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.590363 4885 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused" start-of-body= Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.590469 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.924164 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wng4c" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.924265 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wng4c" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.967920 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wng4c" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.968733 4885 status_manager.go:851] "Failed to get status for pod" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" pod="openshift-marketplace/redhat-marketplace-mfdx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mfdx7\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.969359 4885 status_manager.go:851] "Failed to get status for pod" podUID="d94abf5b-36a7-4e56-9f8f-1bbc4f729676" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.970722 4885 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:27 crc kubenswrapper[4885]: I0130 00:12:27.971118 4885 status_manager.go:851] "Failed to get status for pod" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" pod="openshift-marketplace/redhat-operators-wng4c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wng4c\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.150944 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="5a2c48e3-e437-44e2-a16a-4243f19bb405" path="/var/lib/kubelet/pods/5a2c48e3-e437-44e2-a16a-4243f19bb405/volumes" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.152848 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82414781-e2d1-4742-9758-b2bdd1dc8061" path="/var/lib/kubelet/pods/82414781-e2d1-4742-9758-b2bdd1dc8061/volumes" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.214713 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"292fbf570f2d15686dc8639a3219da69661cb031b42f02cc3ded5948312459a8"} Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.214821 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"0acc669903905c665600f1c30508a5e9f3b64802113ca5f4bfe41f20beea3bb1"} Jan 30 00:12:28 crc kubenswrapper[4885]: E0130 00:12:28.215789 4885 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.213:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.215873 4885 status_manager.go:851] "Failed to get status for pod" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" pod="openshift-marketplace/redhat-marketplace-mfdx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mfdx7\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.216281 4885 status_manager.go:851] "Failed to get status for pod" podUID="d94abf5b-36a7-4e56-9f8f-1bbc4f729676" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.216708 4885 status_manager.go:851] "Failed to get status for pod" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" pod="openshift-marketplace/redhat-operators-wng4c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wng4c\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.219625 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.220800 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.221247 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.275953 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wng4c" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.276890 4885 status_manager.go:851] "Failed to get status for pod" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" pod="openshift-marketplace/redhat-operators-wng4c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wng4c\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.277430 4885 status_manager.go:851] "Failed to get status for pod" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" pod="openshift-marketplace/redhat-marketplace-mfdx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mfdx7\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.277603 4885 status_manager.go:851] "Failed to get status for pod" podUID="d94abf5b-36a7-4e56-9f8f-1bbc4f729676" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.505246 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.508141 4885 status_manager.go:851] "Failed to get status for pod" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" pod="openshift-marketplace/redhat-operators-wng4c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wng4c\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.508754 4885 status_manager.go:851] "Failed to get status for pod" podUID="d94abf5b-36a7-4e56-9f8f-1bbc4f729676" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.509130 4885 status_manager.go:851] "Failed to get status for pod" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" pod="openshift-marketplace/redhat-marketplace-mfdx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mfdx7\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.671096 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d94abf5b-36a7-4e56-9f8f-1bbc4f729676-kubelet-dir\") pod \"d94abf5b-36a7-4e56-9f8f-1bbc4f729676\" (UID: \"d94abf5b-36a7-4e56-9f8f-1bbc4f729676\") " Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.671206 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/d94abf5b-36a7-4e56-9f8f-1bbc4f729676-var-lock\") pod \"d94abf5b-36a7-4e56-9f8f-1bbc4f729676\" (UID: \"d94abf5b-36a7-4e56-9f8f-1bbc4f729676\") " Jan 30 00:12:28 crc kubenswrapper[4885]: 
I0130 00:12:28.671257 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d94abf5b-36a7-4e56-9f8f-1bbc4f729676-kube-api-access\") pod \"d94abf5b-36a7-4e56-9f8f-1bbc4f729676\" (UID: \"d94abf5b-36a7-4e56-9f8f-1bbc4f729676\") " Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.671341 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d94abf5b-36a7-4e56-9f8f-1bbc4f729676-var-lock" (OuterVolumeSpecName: "var-lock") pod "d94abf5b-36a7-4e56-9f8f-1bbc4f729676" (UID: "d94abf5b-36a7-4e56-9f8f-1bbc4f729676"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.671367 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d94abf5b-36a7-4e56-9f8f-1bbc4f729676-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "d94abf5b-36a7-4e56-9f8f-1bbc4f729676" (UID: "d94abf5b-36a7-4e56-9f8f-1bbc4f729676"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.671535 4885 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/d94abf5b-36a7-4e56-9f8f-1bbc4f729676-var-lock\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.671548 4885 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d94abf5b-36a7-4e56-9f8f-1bbc4f729676-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.679861 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d94abf5b-36a7-4e56-9f8f-1bbc4f729676-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "d94abf5b-36a7-4e56-9f8f-1bbc4f729676" (UID: "d94abf5b-36a7-4e56-9f8f-1bbc4f729676"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:12:28 crc kubenswrapper[4885]: I0130 00:12:28.773171 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d94abf5b-36a7-4e56-9f8f-1bbc4f729676-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:28 crc kubenswrapper[4885]: E0130 00:12:28.876731 4885 log.go:32] "RunPodSandbox from runtime service failed" err=< Jan 30 00:12:28 crc kubenswrapper[4885]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_controller-manager-6fcdd7f94c-b77tm_openshift-controller-manager_da039051-d3c0-4064-8254-6eaba29df134_0(57b149e48c32e5aac17968817866f5da356da9d9ff3fd223b5852e1108dcf762): error adding pod openshift-controller-manager_controller-manager-6fcdd7f94c-b77tm to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"57b149e48c32e5aac17968817866f5da356da9d9ff3fd223b5852e1108dcf762" Netns:"/var/run/netns/c8979f50-4f52-4921-a740-861125aa052c" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-6fcdd7f94c-b77tm;K8S_POD_INFRA_CONTAINER_ID=57b149e48c32e5aac17968817866f5da356da9d9ff3fd223b5852e1108dcf762;K8S_POD_UID=da039051-d3c0-4064-8254-6eaba29df134" Path:"" ERRORED: error configuring pod [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm] networking: Multus: [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm/da039051-d3c0-4064-8254-6eaba29df134]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: SetNetworkStatus: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-6fcdd7f94c-b77tm?timeout=1m0s": dial tcp 38.102.83.213:6443: connect: connection refused Jan 30 00:12:28 crc kubenswrapper[4885]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Jan 30 00:12:28 crc kubenswrapper[4885]: > Jan 30 00:12:28 crc kubenswrapper[4885]: E0130 00:12:28.876836 4885 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Jan 30 00:12:28 crc kubenswrapper[4885]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_controller-manager-6fcdd7f94c-b77tm_openshift-controller-manager_da039051-d3c0-4064-8254-6eaba29df134_0(57b149e48c32e5aac17968817866f5da356da9d9ff3fd223b5852e1108dcf762): error adding pod openshift-controller-manager_controller-manager-6fcdd7f94c-b77tm to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"57b149e48c32e5aac17968817866f5da356da9d9ff3fd223b5852e1108dcf762" Netns:"/var/run/netns/c8979f50-4f52-4921-a740-861125aa052c" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-6fcdd7f94c-b77tm;K8S_POD_INFRA_CONTAINER_ID=57b149e48c32e5aac17968817866f5da356da9d9ff3fd223b5852e1108dcf762;K8S_POD_UID=da039051-d3c0-4064-8254-6eaba29df134" Path:"" ERRORED: error configuring pod [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm] networking: Multus: [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm/da039051-d3c0-4064-8254-6eaba29df134]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: SetNetworkStatus: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-6fcdd7f94c-b77tm?timeout=1m0s": dial tcp 38.102.83.213:6443: connect: connection refused Jan 30 00:12:28 crc kubenswrapper[4885]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Jan 30 00:12:28 crc kubenswrapper[4885]: > pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm" Jan 30 00:12:28 crc kubenswrapper[4885]: E0130 00:12:28.876869 4885 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Jan 30 00:12:28 crc kubenswrapper[4885]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_controller-manager-6fcdd7f94c-b77tm_openshift-controller-manager_da039051-d3c0-4064-8254-6eaba29df134_0(57b149e48c32e5aac17968817866f5da356da9d9ff3fd223b5852e1108dcf762): error adding pod openshift-controller-manager_controller-manager-6fcdd7f94c-b77tm to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"57b149e48c32e5aac17968817866f5da356da9d9ff3fd223b5852e1108dcf762" Netns:"/var/run/netns/c8979f50-4f52-4921-a740-861125aa052c" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-6fcdd7f94c-b77tm;K8S_POD_INFRA_CONTAINER_ID=57b149e48c32e5aac17968817866f5da356da9d9ff3fd223b5852e1108dcf762;K8S_POD_UID=da039051-d3c0-4064-8254-6eaba29df134" Path:"" ERRORED: error configuring pod [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm] networking: Multus: [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm/da039051-d3c0-4064-8254-6eaba29df134]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: SetNetworkStatus: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-6fcdd7f94c-b77tm?timeout=1m0s": dial tcp 38.102.83.213:6443: connect: connection refused Jan 30 00:12:28 crc kubenswrapper[4885]: ': StdinData: 
{"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Jan 30 00:12:28 crc kubenswrapper[4885]: > pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm" Jan 30 00:12:28 crc kubenswrapper[4885]: E0130 00:12:28.876950 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"controller-manager-6fcdd7f94c-b77tm_openshift-controller-manager(da039051-d3c0-4064-8254-6eaba29df134)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"controller-manager-6fcdd7f94c-b77tm_openshift-controller-manager(da039051-d3c0-4064-8254-6eaba29df134)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_controller-manager-6fcdd7f94c-b77tm_openshift-controller-manager_da039051-d3c0-4064-8254-6eaba29df134_0(57b149e48c32e5aac17968817866f5da356da9d9ff3fd223b5852e1108dcf762): error adding pod openshift-controller-manager_controller-manager-6fcdd7f94c-b77tm to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"57b149e48c32e5aac17968817866f5da356da9d9ff3fd223b5852e1108dcf762\\\" Netns:\\\"/var/run/netns/c8979f50-4f52-4921-a740-861125aa052c\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-6fcdd7f94c-b77tm;K8S_POD_INFRA_CONTAINER_ID=57b149e48c32e5aac17968817866f5da356da9d9ff3fd223b5852e1108dcf762;K8S_POD_UID=da039051-d3c0-4064-8254-6eaba29df134\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm] networking: Multus: [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm/da039051-d3c0-4064-8254-6eaba29df134]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: SetNetworkStatus: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: status update failed for pod /: Get \\\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-6fcdd7f94c-b77tm?timeout=1m0s\\\": dial tcp 38.102.83.213:6443: connect: connection refused\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm" podUID="da039051-d3c0-4064-8254-6eaba29df134" Jan 30 00:12:29 crc kubenswrapper[4885]: E0130 00:12:29.213921 4885 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.213:6443: connect: connection refused" 
event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188f59d46330343c openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-30 00:12:27.311936572 +0000 UTC m=+233.903408320,LastTimestamp:2026-01-30 00:12:27.311936572 +0000 UTC m=+233.903408320,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.230249 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"d94abf5b-36a7-4e56-9f8f-1bbc4f729676","Type":"ContainerDied","Data":"aa780c5babaa742deb0bb1ddfd9ac07fd463eaa09a2508d39fe2e4de8581b5be"} Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.230317 4885 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aa780c5babaa742deb0bb1ddfd9ac07fd463eaa09a2508d39fe2e4de8581b5be" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.230399 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.237990 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.239166 4885 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879" exitCode=0 Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.260956 4885 status_manager.go:851] "Failed to get status for pod" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" pod="openshift-marketplace/redhat-marketplace-mfdx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mfdx7\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.261612 4885 status_manager.go:851] "Failed to get status for pod" podUID="d94abf5b-36a7-4e56-9f8f-1bbc4f729676" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.261946 4885 status_manager.go:851] "Failed to get status for pod" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" pod="openshift-marketplace/redhat-operators-wng4c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wng4c\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.264370 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 30 
00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.265406 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.266064 4885 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.266506 4885 status_manager.go:851] "Failed to get status for pod" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" pod="openshift-marketplace/redhat-operators-wng4c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wng4c\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.267184 4885 status_manager.go:851] "Failed to get status for pod" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" pod="openshift-marketplace/redhat-marketplace-mfdx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mfdx7\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.267465 4885 status_manager.go:851] "Failed to get status for pod" podUID="d94abf5b-36a7-4e56-9f8f-1bbc4f729676" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.284592 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.285077 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.284730 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.284883 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" podUID="dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" containerName="oauth-openshift" containerID="cri-o://dc0b7da3f0654c20b01873156a07faeb2f7769d8b272518607783f16cc33a181" gracePeriod=15 Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.285145 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.285177 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.285204 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.285524 4885 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.285545 4885 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.285559 4885 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.667057 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.668537 4885 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.669213 4885 status_manager.go:851] "Failed to get status for pod" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" pod="openshift-marketplace/redhat-operators-wng4c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wng4c\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.669915 4885 status_manager.go:851] "Failed to get status for pod" podUID="dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-p87kw\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.670286 4885 status_manager.go:851] "Failed to get status for pod" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" pod="openshift-marketplace/redhat-marketplace-mfdx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mfdx7\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.670625 4885 status_manager.go:851] "Failed to get status for pod" podUID="d94abf5b-36a7-4e56-9f8f-1bbc4f729676" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.791670 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-router-certs\") pod \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.791788 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-service-ca\") pod \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.791844 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-ocp-branding-template\") pod \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.791885 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-cliconfig\") pod 
\"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.791922 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-template-login\") pod \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.791944 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6l7xv\" (UniqueName: \"kubernetes.io/projected/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-kube-api-access-6l7xv\") pod \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.791963 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-audit-policies\") pod \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.791996 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-serving-cert\") pod \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.792021 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-session\") pod \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.792059 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-template-provider-selection\") pod \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.793145 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" (UID: "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.793350 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" (UID: "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.793460 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-trusted-ca-bundle\") pod \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.793491 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-audit-dir\") pod \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.793519 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-template-error\") pod \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.793550 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-idp-0-file-data\") pod \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\" (UID: \"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03\") " Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.793561 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" (UID: "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.793716 4885 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.793729 4885 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.793741 4885 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.793935 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" (UID: "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.794216 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" (UID: "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.801136 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" (UID: "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.801486 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-kube-api-access-6l7xv" (OuterVolumeSpecName: "kube-api-access-6l7xv") pod "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" (UID: "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03"). InnerVolumeSpecName "kube-api-access-6l7xv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.801503 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" (UID: "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.802289 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" (UID: "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.802389 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" (UID: "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.802886 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" (UID: "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.803159 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" (UID: "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.805859 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" (UID: "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.805737 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" (UID: "dd0aa96a-341c-4bb1-82cc-6a7766a0cb03"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.896178 4885 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.896539 4885 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.896645 4885 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.896730 4885 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.896832 4885 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.896979 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6l7xv\" (UniqueName: \"kubernetes.io/projected/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-kube-api-access-6l7xv\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.897060 4885 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.897141 4885 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.897243 4885 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.897344 4885 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:29 crc kubenswrapper[4885]: I0130 00:12:29.897442 4885 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.165902 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Jan 30 00:12:30 crc kubenswrapper[4885]: E0130 00:12:30.202273 4885 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.213:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" volumeName="registry-storage" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.252358 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.254721 4885 scope.go:117] "RemoveContainer" containerID="10356a433fd408a9068e9abaef9949cd47b38369433c0345455b764907dd392a" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.255272 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.255866 4885 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.256236 4885 status_manager.go:851] "Failed to get status for pod" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" pod="openshift-marketplace/redhat-operators-wng4c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wng4c\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.256690 4885 status_manager.go:851] "Failed to get status for pod" podUID="dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-p87kw\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.257165 4885 status_manager.go:851] "Failed to get status for pod" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" pod="openshift-marketplace/redhat-marketplace-mfdx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mfdx7\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.257817 4885 status_manager.go:851] "Failed to get status for pod" podUID="d94abf5b-36a7-4e56-9f8f-1bbc4f729676" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.257981 4885 generic.go:334] "Generic (PLEG): container finished" podID="dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" containerID="dc0b7da3f0654c20b01873156a07faeb2f7769d8b272518607783f16cc33a181" exitCode=0 Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.258037 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" event={"ID":"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03","Type":"ContainerDied","Data":"dc0b7da3f0654c20b01873156a07faeb2f7769d8b272518607783f16cc33a181"} Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.258054 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.258077 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" event={"ID":"dd0aa96a-341c-4bb1-82cc-6a7766a0cb03","Type":"ContainerDied","Data":"005bf459db7735f819ce1abad74c9a580479375a5b887120e816aa6cb8d4fc18"} Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.258753 4885 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.259094 4885 status_manager.go:851] "Failed to get status for pod" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" pod="openshift-marketplace/redhat-operators-wng4c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wng4c\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.259306 4885 status_manager.go:851] "Failed to get status for pod" podUID="dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-p87kw\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.259619 4885 status_manager.go:851] "Failed to get status for pod" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" pod="openshift-marketplace/redhat-marketplace-mfdx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mfdx7\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.260020 4885 status_manager.go:851] "Failed to get status for pod" podUID="d94abf5b-36a7-4e56-9f8f-1bbc4f729676" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.260470 4885 status_manager.go:851] "Failed to get status for pod" podUID="dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-p87kw\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.260676 4885 status_manager.go:851] "Failed to get status for pod" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" pod="openshift-marketplace/redhat-marketplace-mfdx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mfdx7\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.260930 4885 status_manager.go:851] "Failed to get status for pod" podUID="d94abf5b-36a7-4e56-9f8f-1bbc4f729676" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 
38.102.83.213:6443: connect: connection refused" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.262362 4885 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.262578 4885 status_manager.go:851] "Failed to get status for pod" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" pod="openshift-marketplace/redhat-operators-wng4c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wng4c\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.263427 4885 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.263701 4885 status_manager.go:851] "Failed to get status for pod" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" pod="openshift-marketplace/redhat-operators-wng4c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wng4c\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.264088 4885 status_manager.go:851] "Failed to get status for pod" podUID="dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-p87kw\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.264348 4885 status_manager.go:851] "Failed to get status for pod" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" pod="openshift-marketplace/redhat-marketplace-mfdx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mfdx7\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.264596 4885 status_manager.go:851] "Failed to get status for pod" podUID="d94abf5b-36a7-4e56-9f8f-1bbc4f729676" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.278257 4885 scope.go:117] "RemoveContainer" containerID="613d0dc7e78480c4e44c7515878d71cad05af300798b774cdc830f658b4b3eca" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.298606 4885 scope.go:117] "RemoveContainer" containerID="1df6f484d22cb3b2c3c24edb74a462c8bb53ccf6eaedefaba2fd778a72731360" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.316029 4885 scope.go:117] "RemoveContainer" containerID="928deb00416ffee4b3705f23c8c22ed87b48375a31edc73ce624df447a5d6ce4" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.331432 4885 scope.go:117] "RemoveContainer" containerID="bb3a5707bd76e438e9d43015cfe8ac6cc7e9f20969d65a00a1dd3eefa51d7879" Jan 30 00:12:30 crc 
kubenswrapper[4885]: I0130 00:12:30.352605 4885 scope.go:117] "RemoveContainer" containerID="a8a4f768030e76dbbd7281804433170a0c70ff04407cd847950a59808e317d98" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.381100 4885 scope.go:117] "RemoveContainer" containerID="dc0b7da3f0654c20b01873156a07faeb2f7769d8b272518607783f16cc33a181" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.403537 4885 scope.go:117] "RemoveContainer" containerID="dc0b7da3f0654c20b01873156a07faeb2f7769d8b272518607783f16cc33a181" Jan 30 00:12:30 crc kubenswrapper[4885]: E0130 00:12:30.404697 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc0b7da3f0654c20b01873156a07faeb2f7769d8b272518607783f16cc33a181\": container with ID starting with dc0b7da3f0654c20b01873156a07faeb2f7769d8b272518607783f16cc33a181 not found: ID does not exist" containerID="dc0b7da3f0654c20b01873156a07faeb2f7769d8b272518607783f16cc33a181" Jan 30 00:12:30 crc kubenswrapper[4885]: I0130 00:12:30.404746 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc0b7da3f0654c20b01873156a07faeb2f7769d8b272518607783f16cc33a181"} err="failed to get container status \"dc0b7da3f0654c20b01873156a07faeb2f7769d8b272518607783f16cc33a181\": rpc error: code = NotFound desc = could not find container \"dc0b7da3f0654c20b01873156a07faeb2f7769d8b272518607783f16cc33a181\": container with ID starting with dc0b7da3f0654c20b01873156a07faeb2f7769d8b272518607783f16cc33a181 not found: ID does not exist" Jan 30 00:12:33 crc kubenswrapper[4885]: E0130 00:12:33.917940 4885 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:33 crc kubenswrapper[4885]: E0130 00:12:33.919310 4885 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:33 crc kubenswrapper[4885]: E0130 00:12:33.920590 4885 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:33 crc kubenswrapper[4885]: E0130 00:12:33.921362 4885 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:33 crc kubenswrapper[4885]: E0130 00:12:33.921946 4885 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:33 crc kubenswrapper[4885]: I0130 00:12:33.922020 4885 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Jan 30 00:12:33 crc kubenswrapper[4885]: E0130 00:12:33.922407 4885 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.213:6443: connect: connection 
refused" interval="200ms" Jan 30 00:12:34 crc kubenswrapper[4885]: E0130 00:12:34.123871 4885 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.213:6443: connect: connection refused" interval="400ms" Jan 30 00:12:34 crc kubenswrapper[4885]: I0130 00:12:34.149891 4885 status_manager.go:851] "Failed to get status for pod" podUID="dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-p87kw\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:34 crc kubenswrapper[4885]: I0130 00:12:34.150393 4885 status_manager.go:851] "Failed to get status for pod" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" pod="openshift-marketplace/redhat-marketplace-mfdx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mfdx7\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:34 crc kubenswrapper[4885]: I0130 00:12:34.150877 4885 status_manager.go:851] "Failed to get status for pod" podUID="d94abf5b-36a7-4e56-9f8f-1bbc4f729676" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:34 crc kubenswrapper[4885]: I0130 00:12:34.151401 4885 status_manager.go:851] "Failed to get status for pod" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" pod="openshift-marketplace/redhat-operators-wng4c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wng4c\": dial tcp 38.102.83.213:6443: connect: connection refused" Jan 30 00:12:34 crc kubenswrapper[4885]: E0130 00:12:34.525208 4885 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.213:6443: connect: connection refused" interval="800ms" Jan 30 00:12:35 crc kubenswrapper[4885]: E0130 00:12:35.327228 4885 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.213:6443: connect: connection refused" interval="1.6s" Jan 30 00:12:36 crc kubenswrapper[4885]: E0130 00:12:36.928152 4885 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.213:6443: connect: connection refused" interval="3.2s" Jan 30 00:12:39 crc kubenswrapper[4885]: E0130 00:12:39.215601 4885 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.213:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188f59d46330343c openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-30 00:12:27.311936572 +0000 UTC m=+233.903408320,LastTimestamp:2026-01-30 00:12:27.311936572 +0000 UTC m=+233.903408320,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Jan 30 00:12:40 crc kubenswrapper[4885]: E0130 00:12:40.129090 4885 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.213:6443: connect: connection refused" interval="6.4s"
Jan 30 00:12:41 crc kubenswrapper[4885]: I0130 00:12:41.329459 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Jan 30 00:12:41 crc kubenswrapper[4885]: I0130 00:12:41.330041 4885 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c" exitCode=1
Jan 30 00:12:41 crc kubenswrapper[4885]: I0130 00:12:41.330085 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c"}
Jan 30 00:12:41 crc kubenswrapper[4885]: I0130 00:12:41.330813 4885 scope.go:117] "RemoveContainer" containerID="71a698d36c8a7240561363c1fe2c5371a8771c7258184e377737f6a40df73d4c"
Jan 30 00:12:41 crc kubenswrapper[4885]: I0130 00:12:41.331922 4885 status_manager.go:851] "Failed to get status for pod" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" pod="openshift-marketplace/redhat-marketplace-mfdx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mfdx7\": dial tcp 38.102.83.213:6443: connect: connection refused"
Jan 30 00:12:41 crc kubenswrapper[4885]: I0130 00:12:41.332509 4885 status_manager.go:851] "Failed to get status for pod" podUID="d94abf5b-36a7-4e56-9f8f-1bbc4f729676" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.213:6443: connect: connection refused"
Jan 30 00:12:41 crc kubenswrapper[4885]: I0130 00:12:41.333513 4885 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.213:6443: connect: connection refused"
Jan 30 00:12:41 crc kubenswrapper[4885]: I0130 00:12:41.334108 4885 status_manager.go:851] "Failed to get status for pod" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" pod="openshift-marketplace/redhat-operators-wng4c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wng4c\": dial tcp 38.102.83.213:6443: connect: connection refused"
Jan 30 00:12:41 crc kubenswrapper[4885]: I0130 00:12:41.334435 4885 status_manager.go:851] "Failed to get status for pod" podUID="dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-p87kw\": dial tcp 38.102.83.213:6443: connect: connection refused"
Jan 30 00:12:41 crc kubenswrapper[4885]: I0130 00:12:41.416629 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 30 00:12:42 crc kubenswrapper[4885]: I0130 00:12:42.141043 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 30 00:12:42 crc kubenswrapper[4885]: I0130 00:12:42.142602 4885 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.213:6443: connect: connection refused"
Jan 30 00:12:42 crc kubenswrapper[4885]: I0130 00:12:42.143200 4885 status_manager.go:851] "Failed to get status for pod" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" pod="openshift-marketplace/redhat-operators-wng4c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wng4c\": dial tcp 38.102.83.213:6443: connect: connection refused"
Jan 30 00:12:42 crc kubenswrapper[4885]: I0130 00:12:42.143650 4885 status_manager.go:851] "Failed to get status for pod" podUID="dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-p87kw\": dial tcp 38.102.83.213:6443: connect: connection refused"
Jan 30 00:12:42 crc kubenswrapper[4885]: I0130 00:12:42.144450 4885 status_manager.go:851] "Failed to get status for pod" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" pod="openshift-marketplace/redhat-marketplace-mfdx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mfdx7\": dial tcp 38.102.83.213:6443: connect: connection refused"
Jan 30 00:12:42 crc kubenswrapper[4885]: I0130 00:12:42.145141 4885 status_manager.go:851] "Failed to get status for pod" podUID="d94abf5b-36a7-4e56-9f8f-1bbc4f729676" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.213:6443: connect: connection refused"
Jan 30 00:12:42 crc kubenswrapper[4885]: I0130 00:12:42.158598 4885 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="57dad425-1427-4159-b1dc-4991186f30f8"
Jan 30 00:12:42 crc kubenswrapper[4885]: I0130 00:12:42.158663 4885 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="57dad425-1427-4159-b1dc-4991186f30f8"
Jan 30 00:12:42 crc kubenswrapper[4885]: E0130 00:12:42.159436 4885 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 30 00:12:42 crc kubenswrapper[4885]: I0130 00:12:42.160103 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 30 00:12:42 crc kubenswrapper[4885]: W0130 00:12:42.185903 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-b345c948b04b967ee71e02c66a72f97daa1d2aa1385afed279648deb899abb67 WatchSource:0}: Error finding container b345c948b04b967ee71e02c66a72f97daa1d2aa1385afed279648deb899abb67: Status 404 returned error can't find the container with id b345c948b04b967ee71e02c66a72f97daa1d2aa1385afed279648deb899abb67
Jan 30 00:12:42 crc kubenswrapper[4885]: I0130 00:12:42.342254 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b345c948b04b967ee71e02c66a72f97daa1d2aa1385afed279648deb899abb67"}
Jan 30 00:12:42 crc kubenswrapper[4885]: I0130 00:12:42.347007 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Jan 30 00:12:42 crc kubenswrapper[4885]: I0130 00:12:42.347121 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e1d83e57895c33bb07edfac8e00b79ba1b5f262443f57650df979b23509a45a9"}
Jan 30 00:12:42 crc kubenswrapper[4885]: I0130 00:12:42.348508 4885 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.213:6443: connect: connection refused"
Jan 30 00:12:42 crc kubenswrapper[4885]: I0130 00:12:42.349201 4885 status_manager.go:851] "Failed to get status for pod" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" pod="openshift-marketplace/redhat-operators-wng4c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wng4c\": dial tcp 38.102.83.213:6443: connect: connection refused"
Jan 30 00:12:42 crc kubenswrapper[4885]: I0130 00:12:42.349683 4885 status_manager.go:851] "Failed to get status for pod" podUID="dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-p87kw\": dial tcp 38.102.83.213:6443: connect: connection refused"
Jan 30 00:12:42 crc kubenswrapper[4885]: I0130 00:12:42.351187 4885 status_manager.go:851] "Failed to get status for pod" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" pod="openshift-marketplace/redhat-marketplace-mfdx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mfdx7\": dial tcp 38.102.83.213:6443: connect: connection refused"
Jan 30 00:12:42 crc kubenswrapper[4885]: I0130 00:12:42.352018 4885 status_manager.go:851] "Failed to get status for pod" podUID="d94abf5b-36a7-4e56-9f8f-1bbc4f729676" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.213:6443: connect: connection refused"
Jan 30 00:12:43 crc kubenswrapper[4885]: I0130 00:12:43.141933 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm"
Jan 30 00:12:43 crc kubenswrapper[4885]: I0130 00:12:43.143010 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm"
Jan 30 00:12:43 crc kubenswrapper[4885]: I0130 00:12:43.356152 4885 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="8096b375a05df7c6b87f48494004290ed4d61ff98256fd2fb0e0ed15fc2e6c76" exitCode=0
Jan 30 00:12:43 crc kubenswrapper[4885]: I0130 00:12:43.356256 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"8096b375a05df7c6b87f48494004290ed4d61ff98256fd2fb0e0ed15fc2e6c76"}
Jan 30 00:12:43 crc kubenswrapper[4885]: I0130 00:12:43.356517 4885 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="57dad425-1427-4159-b1dc-4991186f30f8"
Jan 30 00:12:43 crc kubenswrapper[4885]: I0130 00:12:43.356547 4885 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="57dad425-1427-4159-b1dc-4991186f30f8"
Jan 30 00:12:43 crc kubenswrapper[4885]: I0130 00:12:43.357065 4885 status_manager.go:851] "Failed to get status for pod" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" pod="openshift-marketplace/redhat-operators-wng4c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wng4c\": dial tcp 38.102.83.213:6443: connect: connection refused"
Jan 30 00:12:43 crc kubenswrapper[4885]: E0130 00:12:43.357159 4885 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.213:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 30 00:12:43 crc kubenswrapper[4885]: I0130 00:12:43.357485 4885 status_manager.go:851] "Failed to get status for pod" podUID="dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" pod="openshift-authentication/oauth-openshift-558db77b4-p87kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-p87kw\": dial tcp 38.102.83.213:6443: connect: connection refused"
Jan 30 00:12:43 crc kubenswrapper[4885]: I0130 00:12:43.358221 4885 status_manager.go:851] "Failed to get status for pod" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" pod="openshift-marketplace/redhat-marketplace-mfdx7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-mfdx7\": dial tcp 38.102.83.213:6443: connect: connection refused"
Jan 30 00:12:43 crc kubenswrapper[4885]: I0130 00:12:43.358888 4885 status_manager.go:851] "Failed to get status for pod" podUID="d94abf5b-36a7-4e56-9f8f-1bbc4f729676" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.213:6443: connect: connection refused"
Jan 30 00:12:43 crc kubenswrapper[4885]: I0130 00:12:43.359382 4885 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.213:6443: connect: connection refused"
Jan 30 00:12:43 crc kubenswrapper[4885]: E0130 00:12:43.661272 4885 log.go:32] "RunPodSandbox from runtime service failed" err=<
Jan 30 00:12:43 crc kubenswrapper[4885]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_controller-manager-6fcdd7f94c-b77tm_openshift-controller-manager_da039051-d3c0-4064-8254-6eaba29df134_0(b505e2dc3e9b23ae4435037445c0a25f158403a011cb64557e4cd034df287589): error adding pod openshift-controller-manager_controller-manager-6fcdd7f94c-b77tm to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"b505e2dc3e9b23ae4435037445c0a25f158403a011cb64557e4cd034df287589" Netns:"/var/run/netns/6e738234-e3d7-4112-8c9d-8565619372ad" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-6fcdd7f94c-b77tm;K8S_POD_INFRA_CONTAINER_ID=b505e2dc3e9b23ae4435037445c0a25f158403a011cb64557e4cd034df287589;K8S_POD_UID=da039051-d3c0-4064-8254-6eaba29df134" Path:"" ERRORED: error configuring pod [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm] networking: Multus: [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm/da039051-d3c0-4064-8254-6eaba29df134]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: SetNetworkStatus: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-6fcdd7f94c-b77tm?timeout=1m0s": dial tcp 38.102.83.213:6443: connect: connection refused
Jan 30 00:12:43 crc kubenswrapper[4885]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"}
Jan 30 00:12:43 crc kubenswrapper[4885]: >
Jan 30 00:12:43 crc kubenswrapper[4885]: E0130 00:12:43.661791 4885 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=<
Jan 30 00:12:43 crc kubenswrapper[4885]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_controller-manager-6fcdd7f94c-b77tm_openshift-controller-manager_da039051-d3c0-4064-8254-6eaba29df134_0(b505e2dc3e9b23ae4435037445c0a25f158403a011cb64557e4cd034df287589): error adding pod openshift-controller-manager_controller-manager-6fcdd7f94c-b77tm to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400:
'ContainerID:"b505e2dc3e9b23ae4435037445c0a25f158403a011cb64557e4cd034df287589" Netns:"/var/run/netns/6e738234-e3d7-4112-8c9d-8565619372ad" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-6fcdd7f94c-b77tm;K8S_POD_INFRA_CONTAINER_ID=b505e2dc3e9b23ae4435037445c0a25f158403a011cb64557e4cd034df287589;K8S_POD_UID=da039051-d3c0-4064-8254-6eaba29df134" Path:"" ERRORED: error configuring pod [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm] networking: Multus: [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm/da039051-d3c0-4064-8254-6eaba29df134]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: SetNetworkStatus: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-6fcdd7f94c-b77tm?timeout=1m0s": dial tcp 38.102.83.213:6443: connect: connection refused Jan 30 00:12:43 crc kubenswrapper[4885]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Jan 30 00:12:43 crc kubenswrapper[4885]: > pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm" Jan 30 00:12:43 crc kubenswrapper[4885]: E0130 00:12:43.661812 4885 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Jan 30 00:12:43 crc kubenswrapper[4885]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_controller-manager-6fcdd7f94c-b77tm_openshift-controller-manager_da039051-d3c0-4064-8254-6eaba29df134_0(b505e2dc3e9b23ae4435037445c0a25f158403a011cb64557e4cd034df287589): error adding pod openshift-controller-manager_controller-manager-6fcdd7f94c-b77tm to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"b505e2dc3e9b23ae4435037445c0a25f158403a011cb64557e4cd034df287589" Netns:"/var/run/netns/6e738234-e3d7-4112-8c9d-8565619372ad" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-6fcdd7f94c-b77tm;K8S_POD_INFRA_CONTAINER_ID=b505e2dc3e9b23ae4435037445c0a25f158403a011cb64557e4cd034df287589;K8S_POD_UID=da039051-d3c0-4064-8254-6eaba29df134" Path:"" ERRORED: error configuring pod [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm] networking: Multus: [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm/da039051-d3c0-4064-8254-6eaba29df134]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: SetNetworkStatus: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-6fcdd7f94c-b77tm?timeout=1m0s": dial tcp 38.102.83.213:6443: connect: connection refused Jan 30 00:12:43 crc kubenswrapper[4885]: ': StdinData: 
{"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Jan 30 00:12:43 crc kubenswrapper[4885]: > pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm" Jan 30 00:12:43 crc kubenswrapper[4885]: E0130 00:12:43.661881 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"controller-manager-6fcdd7f94c-b77tm_openshift-controller-manager(da039051-d3c0-4064-8254-6eaba29df134)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"controller-manager-6fcdd7f94c-b77tm_openshift-controller-manager(da039051-d3c0-4064-8254-6eaba29df134)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_controller-manager-6fcdd7f94c-b77tm_openshift-controller-manager_da039051-d3c0-4064-8254-6eaba29df134_0(b505e2dc3e9b23ae4435037445c0a25f158403a011cb64557e4cd034df287589): error adding pod openshift-controller-manager_controller-manager-6fcdd7f94c-b77tm to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"b505e2dc3e9b23ae4435037445c0a25f158403a011cb64557e4cd034df287589\\\" Netns:\\\"/var/run/netns/6e738234-e3d7-4112-8c9d-8565619372ad\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-controller-manager;K8S_POD_NAME=controller-manager-6fcdd7f94c-b77tm;K8S_POD_INFRA_CONTAINER_ID=b505e2dc3e9b23ae4435037445c0a25f158403a011cb64557e4cd034df287589;K8S_POD_UID=da039051-d3c0-4064-8254-6eaba29df134\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm] networking: Multus: [openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm/da039051-d3c0-4064-8254-6eaba29df134]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: SetNetworkStatus: failed to update the pod controller-manager-6fcdd7f94c-b77tm in out of cluster comm: status update failed for pod /: Get \\\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-6fcdd7f94c-b77tm?timeout=1m0s\\\": dial tcp 38.102.83.213:6443: connect: connection refused\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm" podUID="da039051-d3c0-4064-8254-6eaba29df134" Jan 30 00:12:44 crc kubenswrapper[4885]: I0130 00:12:44.369798 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"97af346b3d86edd82e826e9f7d84a155532e02186e29629736f86ca7bc43043a"} Jan 30 00:12:45 crc kubenswrapper[4885]: 
I0130 00:12:45.377835 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"fcf23970e64c247c7e373c02a893ef1524af815cfb1af4c59130e2c1d8881d31"} Jan 30 00:12:45 crc kubenswrapper[4885]: I0130 00:12:45.378270 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"7cfa2c428ef6dba0cc7c5e2da40c97f17492274283c9415c4ee744609042e673"} Jan 30 00:12:46 crc kubenswrapper[4885]: I0130 00:12:46.388058 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"3cd376a809de8f051ed7d016a2cb5e24f88a1885905ef36b5ed0f438b6891d76"} Jan 30 00:12:46 crc kubenswrapper[4885]: I0130 00:12:46.388523 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 00:12:46 crc kubenswrapper[4885]: I0130 00:12:46.388540 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"2f1c392a3302f1ade42dcf39d3b6b351bde745cd169fdcf675a2c62a98cf8a7b"} Jan 30 00:12:46 crc kubenswrapper[4885]: I0130 00:12:46.390261 4885 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="57dad425-1427-4159-b1dc-4991186f30f8" Jan 30 00:12:46 crc kubenswrapper[4885]: I0130 00:12:46.390367 4885 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="57dad425-1427-4159-b1dc-4991186f30f8" Jan 30 00:12:47 crc kubenswrapper[4885]: I0130 00:12:47.160364 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 00:12:47 crc kubenswrapper[4885]: I0130 00:12:47.160456 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 00:12:47 crc kubenswrapper[4885]: I0130 00:12:47.172034 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 00:12:50 crc kubenswrapper[4885]: I0130 00:12:50.525753 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 00:12:51 crc kubenswrapper[4885]: I0130 00:12:51.402606 4885 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 00:12:51 crc kubenswrapper[4885]: I0130 00:12:51.416504 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 30 00:12:51 crc kubenswrapper[4885]: I0130 00:12:51.417041 4885 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Jan 30 00:12:51 crc kubenswrapper[4885]: I0130 00:12:51.417179 4885 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Jan 30 00:12:52 crc kubenswrapper[4885]: I0130 00:12:52.168266 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 00:12:52 crc kubenswrapper[4885]: I0130 00:12:52.173406 4885 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="7fbe76d6-38ab-43aa-80d5-9304d364c3b3" Jan 30 00:12:52 crc kubenswrapper[4885]: I0130 00:12:52.431336 4885 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="57dad425-1427-4159-b1dc-4991186f30f8" Jan 30 00:12:52 crc kubenswrapper[4885]: I0130 00:12:52.431404 4885 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="57dad425-1427-4159-b1dc-4991186f30f8" Jan 30 00:12:53 crc kubenswrapper[4885]: I0130 00:12:53.436159 4885 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="57dad425-1427-4159-b1dc-4991186f30f8" Jan 30 00:12:53 crc kubenswrapper[4885]: I0130 00:12:53.436441 4885 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="57dad425-1427-4159-b1dc-4991186f30f8" Jan 30 00:12:54 crc kubenswrapper[4885]: I0130 00:12:54.170337 4885 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="7fbe76d6-38ab-43aa-80d5-9304d364c3b3" Jan 30 00:12:54 crc kubenswrapper[4885]: I0130 00:12:54.175689 4885 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","burstable","podab0a81e5-1af3-4340-a412-b0ee0d506468"] err="unable to destroy cgroup paths for cgroup [kubepods burstable podab0a81e5-1af3-4340-a412-b0ee0d506468] : Timed out while waiting for systemd to remove kubepods-burstable-podab0a81e5_1af3_4340_a412_b0ee0d506468.slice" Jan 30 00:12:57 crc kubenswrapper[4885]: I0130 00:12:57.141949 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm" Jan 30 00:12:57 crc kubenswrapper[4885]: I0130 00:12:57.143502 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm" Jan 30 00:12:57 crc kubenswrapper[4885]: I0130 00:12:57.466686 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm" event={"ID":"da039051-d3c0-4064-8254-6eaba29df134","Type":"ContainerStarted","Data":"a719835aaee071934dacccda24391eb370012666e1e3b7dd47dfcf89e4165964"} Jan 30 00:12:58 crc kubenswrapper[4885]: I0130 00:12:58.475302 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm" event={"ID":"da039051-d3c0-4064-8254-6eaba29df134","Type":"ContainerStarted","Data":"f7da2634c54e3b3fab3f3d9fce658557a296879a310c420041997f1ef46fff4e"} Jan 30 00:12:58 crc kubenswrapper[4885]: I0130 00:12:58.475724 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm" Jan 30 00:12:58 crc kubenswrapper[4885]: I0130 00:12:58.481220 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm" Jan 30 00:13:01 crc kubenswrapper[4885]: I0130 00:13:01.041123 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 30 00:13:01 crc kubenswrapper[4885]: I0130 00:13:01.417280 4885 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Jan 30 00:13:01 crc kubenswrapper[4885]: I0130 00:13:01.417414 4885 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Jan 30 00:13:01 crc kubenswrapper[4885]: I0130 00:13:01.458446 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 30 00:13:01 crc kubenswrapper[4885]: I0130 00:13:01.511062 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 30 00:13:01 crc kubenswrapper[4885]: I0130 00:13:01.695976 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 30 00:13:01 crc kubenswrapper[4885]: I0130 00:13:01.921214 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 30 00:13:02 crc kubenswrapper[4885]: I0130 00:13:02.006654 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 30 00:13:02 crc kubenswrapper[4885]: I0130 00:13:02.257988 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 30 00:13:02 crc kubenswrapper[4885]: I0130 00:13:02.454842 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 30 00:13:02 crc kubenswrapper[4885]: I0130 00:13:02.458086 4885 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 30 00:13:02 crc kubenswrapper[4885]: I0130 00:13:02.481017 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 30 00:13:02 crc kubenswrapper[4885]: I0130 00:13:02.492133 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 30 00:13:02 crc kubenswrapper[4885]: I0130 00:13:02.493879 4885 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 30 00:13:02 crc kubenswrapper[4885]: I0130 00:13:02.521165 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 30 00:13:02 crc kubenswrapper[4885]: I0130 00:13:02.686439 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 30 00:13:02 crc kubenswrapper[4885]: I0130 00:13:02.777449 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 30 00:13:03 crc kubenswrapper[4885]: I0130 00:13:03.080734 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 30 00:13:03 crc kubenswrapper[4885]: I0130 00:13:03.143063 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 30 00:13:03 crc kubenswrapper[4885]: I0130 00:13:03.394585 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 30 00:13:03 crc kubenswrapper[4885]: I0130 00:13:03.533296 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 30 00:13:03 crc kubenswrapper[4885]: I0130 00:13:03.589889 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 30 00:13:03 crc kubenswrapper[4885]: I0130 00:13:03.735133 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 30 00:13:03 crc kubenswrapper[4885]: I0130 00:13:03.828278 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 30 00:13:03 crc kubenswrapper[4885]: I0130 00:13:03.906218 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 30 00:13:03 crc kubenswrapper[4885]: I0130 00:13:03.982008 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.023549 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.057575 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.093252 4885 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.098728 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm" podStartSLOduration=42.09869487 podStartE2EDuration="42.09869487s" podCreationTimestamp="2026-01-30 00:12:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:12:58.498282071 +0000 UTC m=+265.089753819" watchObservedRunningTime="2026-01-30 00:13:04.09869487 +0000 UTC m=+270.690166658" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.102558 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-p87kw","openshift-kube-apiserver/kube-apiserver-crc"] Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.102670 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs"] Jan 30 00:13:04 crc kubenswrapper[4885]: E0130 00:13:04.103095 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d94abf5b-36a7-4e56-9f8f-1bbc4f729676" containerName="installer" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.103130 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="d94abf5b-36a7-4e56-9f8f-1bbc4f729676" containerName="installer" Jan 30 00:13:04 crc kubenswrapper[4885]: E0130 00:13:04.103175 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" containerName="oauth-openshift" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.103191 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" containerName="oauth-openshift" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.103234 4885 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="57dad425-1427-4159-b1dc-4991186f30f8" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.103258 4885 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="57dad425-1427-4159-b1dc-4991186f30f8" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.103386 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" containerName="oauth-openshift" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.103404 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="d94abf5b-36a7-4e56-9f8f-1bbc4f729676" containerName="installer" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.104119 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6fcdd7f94c-b77tm"] Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.104388 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.108421 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.109405 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.109555 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.109891 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.109965 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.110025 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.109965 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.143127 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=13.143098106 podStartE2EDuration="13.143098106s" podCreationTimestamp="2026-01-30 00:12:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:13:04.139005427 +0000 UTC m=+270.730477205" watchObservedRunningTime="2026-01-30 00:13:04.143098106 +0000 UTC m=+270.734569864" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.159255 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd0aa96a-341c-4bb1-82cc-6a7766a0cb03" path="/var/lib/kubelet/pods/dd0aa96a-341c-4bb1-82cc-6a7766a0cb03/volumes" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.245164 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f16f8def-9d4b-4f2f-953a-4e06e44a81df-serving-cert\") pod \"route-controller-manager-7b5bc7c588-fjgvs\" (UID: \"f16f8def-9d4b-4f2f-953a-4e06e44a81df\") " pod="openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.245433 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndvcx\" (UniqueName: \"kubernetes.io/projected/f16f8def-9d4b-4f2f-953a-4e06e44a81df-kube-api-access-ndvcx\") pod \"route-controller-manager-7b5bc7c588-fjgvs\" (UID: \"f16f8def-9d4b-4f2f-953a-4e06e44a81df\") " pod="openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.245476 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f16f8def-9d4b-4f2f-953a-4e06e44a81df-client-ca\") pod \"route-controller-manager-7b5bc7c588-fjgvs\" (UID: \"f16f8def-9d4b-4f2f-953a-4e06e44a81df\") " 
pod="openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.245530 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f16f8def-9d4b-4f2f-953a-4e06e44a81df-config\") pod \"route-controller-manager-7b5bc7c588-fjgvs\" (UID: \"f16f8def-9d4b-4f2f-953a-4e06e44a81df\") " pod="openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.317281 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.346481 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndvcx\" (UniqueName: \"kubernetes.io/projected/f16f8def-9d4b-4f2f-953a-4e06e44a81df-kube-api-access-ndvcx\") pod \"route-controller-manager-7b5bc7c588-fjgvs\" (UID: \"f16f8def-9d4b-4f2f-953a-4e06e44a81df\") " pod="openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.346538 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f16f8def-9d4b-4f2f-953a-4e06e44a81df-client-ca\") pod \"route-controller-manager-7b5bc7c588-fjgvs\" (UID: \"f16f8def-9d4b-4f2f-953a-4e06e44a81df\") " pod="openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.346613 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f16f8def-9d4b-4f2f-953a-4e06e44a81df-config\") pod \"route-controller-manager-7b5bc7c588-fjgvs\" (UID: \"f16f8def-9d4b-4f2f-953a-4e06e44a81df\") " pod="openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.346658 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f16f8def-9d4b-4f2f-953a-4e06e44a81df-serving-cert\") pod \"route-controller-manager-7b5bc7c588-fjgvs\" (UID: \"f16f8def-9d4b-4f2f-953a-4e06e44a81df\") " pod="openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.347638 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f16f8def-9d4b-4f2f-953a-4e06e44a81df-client-ca\") pod \"route-controller-manager-7b5bc7c588-fjgvs\" (UID: \"f16f8def-9d4b-4f2f-953a-4e06e44a81df\") " pod="openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.348954 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f16f8def-9d4b-4f2f-953a-4e06e44a81df-config\") pod \"route-controller-manager-7b5bc7c588-fjgvs\" (UID: \"f16f8def-9d4b-4f2f-953a-4e06e44a81df\") " pod="openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.357393 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 
00:13:04.357702 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f16f8def-9d4b-4f2f-953a-4e06e44a81df-serving-cert\") pod \"route-controller-manager-7b5bc7c588-fjgvs\" (UID: \"f16f8def-9d4b-4f2f-953a-4e06e44a81df\") " pod="openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.374170 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndvcx\" (UniqueName: \"kubernetes.io/projected/f16f8def-9d4b-4f2f-953a-4e06e44a81df-kube-api-access-ndvcx\") pod \"route-controller-manager-7b5bc7c588-fjgvs\" (UID: \"f16f8def-9d4b-4f2f-953a-4e06e44a81df\") " pod="openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.434361 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.486667 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.538976 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.578488 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.593397 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.608124 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.635895 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.654435 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.711025 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.714819 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.784815 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.890754 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.893716 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 30 00:13:04 crc kubenswrapper[4885]: I0130 00:13:04.991457 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 30 00:13:05 crc kubenswrapper[4885]: I0130 
00:13:05.030322 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 30 00:13:05 crc kubenswrapper[4885]: I0130 00:13:05.053334 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 30 00:13:05 crc kubenswrapper[4885]: I0130 00:13:05.064409 4885 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 30 00:13:05 crc kubenswrapper[4885]: I0130 00:13:05.084077 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 30 00:13:05 crc kubenswrapper[4885]: I0130 00:13:05.139377 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 30 00:13:05 crc kubenswrapper[4885]: I0130 00:13:05.196065 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 30 00:13:05 crc kubenswrapper[4885]: I0130 00:13:05.396113 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 30 00:13:05 crc kubenswrapper[4885]: I0130 00:13:05.439343 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 30 00:13:05 crc kubenswrapper[4885]: I0130 00:13:05.572873 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 30 00:13:05 crc kubenswrapper[4885]: I0130 00:13:05.633302 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 30 00:13:05 crc kubenswrapper[4885]: I0130 00:13:05.642150 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 30 00:13:05 crc kubenswrapper[4885]: I0130 00:13:05.662526 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 30 00:13:05 crc kubenswrapper[4885]: I0130 00:13:05.859089 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 30 00:13:05 crc kubenswrapper[4885]: I0130 00:13:05.885691 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 30 00:13:05 crc kubenswrapper[4885]: I0130 00:13:05.976572 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.120062 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.143269 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.270691 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.286219 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 30 00:13:06 crc 
kubenswrapper[4885]: I0130 00:13:06.303519 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.336498 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.353282 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.375318 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.394687 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.424346 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.429120 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.502660 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.503033 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs"] Jan 30 00:13:06 crc kubenswrapper[4885]: W0130 00:13:06.515132 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf16f8def_9d4b_4f2f_953a_4e06e44a81df.slice/crio-3f992367866cd10df592b6582a0abaf7a48374a98a6a2210ebf144128471130b WatchSource:0}: Error finding container 3f992367866cd10df592b6582a0abaf7a48374a98a6a2210ebf144128471130b: Status 404 returned error can't find the container with id 3f992367866cd10df592b6582a0abaf7a48374a98a6a2210ebf144128471130b Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.525270 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs" event={"ID":"f16f8def-9d4b-4f2f-953a-4e06e44a81df","Type":"ContainerStarted","Data":"3f992367866cd10df592b6582a0abaf7a48374a98a6a2210ebf144128471130b"} Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.584647 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.611979 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.615436 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.732045 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.793588 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 30 
00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.812180 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.878287 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.887212 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.906504 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.989492 4885 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 30 00:13:06 crc kubenswrapper[4885]: I0130 00:13:06.998566 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 30 00:13:07 crc kubenswrapper[4885]: I0130 00:13:07.000786 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 30 00:13:07 crc kubenswrapper[4885]: I0130 00:13:07.042487 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 30 00:13:07 crc kubenswrapper[4885]: I0130 00:13:07.097347 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 30 00:13:07 crc kubenswrapper[4885]: I0130 00:13:07.206094 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 30 00:13:07 crc kubenswrapper[4885]: I0130 00:13:07.243130 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 30 00:13:07 crc kubenswrapper[4885]: I0130 00:13:07.313570 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 30 00:13:07 crc kubenswrapper[4885]: I0130 00:13:07.383038 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 30 00:13:07 crc kubenswrapper[4885]: I0130 00:13:07.452420 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 30 00:13:07 crc kubenswrapper[4885]: I0130 00:13:07.484058 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 30 00:13:07 crc kubenswrapper[4885]: I0130 00:13:07.518934 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 30 00:13:07 crc kubenswrapper[4885]: I0130 00:13:07.535113 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs" event={"ID":"f16f8def-9d4b-4f2f-953a-4e06e44a81df","Type":"ContainerStarted","Data":"56fb927d1e475219c911e085c5e55665bdf1bc7046d5711d5bd1413b396322fa"} Jan 30 00:13:07 crc kubenswrapper[4885]: I0130 00:13:07.535722 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs" Jan 30 00:13:07 crc 
kubenswrapper[4885]: I0130 00:13:07.560814 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs" podStartSLOduration=44.56079089 podStartE2EDuration="44.56079089s" podCreationTimestamp="2026-01-30 00:12:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:13:07.555981401 +0000 UTC m=+274.147453189" watchObservedRunningTime="2026-01-30 00:13:07.56079089 +0000 UTC m=+274.152262638" Jan 30 00:13:07 crc kubenswrapper[4885]: I0130 00:13:07.580983 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 30 00:13:07 crc kubenswrapper[4885]: I0130 00:13:07.724013 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 30 00:13:07 crc kubenswrapper[4885]: I0130 00:13:07.812562 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 30 00:13:07 crc kubenswrapper[4885]: I0130 00:13:07.824365 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 30 00:13:07 crc kubenswrapper[4885]: I0130 00:13:07.830414 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 30 00:13:07 crc kubenswrapper[4885]: I0130 00:13:07.858019 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 30 00:13:07 crc kubenswrapper[4885]: I0130 00:13:07.956966 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 30 00:13:08 crc kubenswrapper[4885]: I0130 00:13:08.103902 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 30 00:13:08 crc kubenswrapper[4885]: I0130 00:13:08.140748 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 30 00:13:08 crc kubenswrapper[4885]: I0130 00:13:08.166305 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 30 00:13:08 crc kubenswrapper[4885]: I0130 00:13:08.282668 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 30 00:13:08 crc kubenswrapper[4885]: I0130 00:13:08.380893 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 30 00:13:08 crc kubenswrapper[4885]: I0130 00:13:08.504192 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 30 00:13:08 crc kubenswrapper[4885]: I0130 00:13:08.518895 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 30 00:13:08 crc kubenswrapper[4885]: I0130 00:13:08.535961 4885 patch_prober.go:28] interesting pod/route-controller-manager-7b5bc7c588-fjgvs container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.65:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 30 00:13:08 crc 
kubenswrapper[4885]: I0130 00:13:08.536042 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs" podUID="f16f8def-9d4b-4f2f-953a-4e06e44a81df" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.65:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 30 00:13:08 crc kubenswrapper[4885]: I0130 00:13:08.570574 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 30 00:13:08 crc kubenswrapper[4885]: I0130 00:13:08.632687 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 30 00:13:08 crc kubenswrapper[4885]: I0130 00:13:08.645455 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 30 00:13:08 crc kubenswrapper[4885]: I0130 00:13:08.705794 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 30 00:13:08 crc kubenswrapper[4885]: I0130 00:13:08.769728 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 30 00:13:08 crc kubenswrapper[4885]: I0130 00:13:08.816652 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 30 00:13:08 crc kubenswrapper[4885]: I0130 00:13:08.858832 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 30 00:13:08 crc kubenswrapper[4885]: I0130 00:13:08.895223 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 30 00:13:08 crc kubenswrapper[4885]: I0130 00:13:08.931033 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 30 00:13:08 crc kubenswrapper[4885]: I0130 00:13:08.966738 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 30 00:13:09 crc kubenswrapper[4885]: I0130 00:13:09.055162 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 30 00:13:09 crc kubenswrapper[4885]: I0130 00:13:09.157928 4885 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 30 00:13:09 crc kubenswrapper[4885]: I0130 00:13:09.251671 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 30 00:13:09 crc kubenswrapper[4885]: I0130 00:13:09.289589 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 30 00:13:09 crc kubenswrapper[4885]: I0130 00:13:09.293949 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 30 00:13:09 crc kubenswrapper[4885]: I0130 00:13:09.359903 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 30 00:13:09 crc kubenswrapper[4885]: I0130 00:13:09.371677 4885 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-image-registry"/"image-registry-tls" Jan 30 00:13:09 crc kubenswrapper[4885]: I0130 00:13:09.434337 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 30 00:13:09 crc kubenswrapper[4885]: I0130 00:13:09.465176 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 30 00:13:09 crc kubenswrapper[4885]: I0130 00:13:09.494041 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 30 00:13:09 crc kubenswrapper[4885]: I0130 00:13:09.504452 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 30 00:13:09 crc kubenswrapper[4885]: I0130 00:13:09.525556 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 30 00:13:09 crc kubenswrapper[4885]: I0130 00:13:09.542245 4885 patch_prober.go:28] interesting pod/route-controller-manager-7b5bc7c588-fjgvs container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.65:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 30 00:13:09 crc kubenswrapper[4885]: I0130 00:13:09.542334 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs" podUID="f16f8def-9d4b-4f2f-953a-4e06e44a81df" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.65:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 30 00:13:09 crc kubenswrapper[4885]: I0130 00:13:09.578021 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 30 00:13:09 crc kubenswrapper[4885]: I0130 00:13:09.691518 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 30 00:13:09 crc kubenswrapper[4885]: I0130 00:13:09.714214 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 30 00:13:09 crc kubenswrapper[4885]: I0130 00:13:09.766585 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 30 00:13:09 crc kubenswrapper[4885]: I0130 00:13:09.819204 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 30 00:13:09 crc kubenswrapper[4885]: I0130 00:13:09.826253 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 30 00:13:09 crc kubenswrapper[4885]: I0130 00:13:09.842972 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 30 00:13:09 crc kubenswrapper[4885]: I0130 00:13:09.895756 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 30 00:13:10 crc kubenswrapper[4885]: I0130 00:13:10.018684 4885 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 30 00:13:10 crc kubenswrapper[4885]: I0130 00:13:10.018790 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 30 00:13:10 crc kubenswrapper[4885]: I0130 00:13:10.144571 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 30 00:13:10 crc kubenswrapper[4885]: I0130 00:13:10.187349 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 30 00:13:10 crc kubenswrapper[4885]: I0130 00:13:10.257409 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 30 00:13:10 crc kubenswrapper[4885]: I0130 00:13:10.337881 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 30 00:13:10 crc kubenswrapper[4885]: I0130 00:13:10.459358 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 30 00:13:10 crc kubenswrapper[4885]: I0130 00:13:10.498247 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 30 00:13:10 crc kubenswrapper[4885]: I0130 00:13:10.546760 4885 patch_prober.go:28] interesting pod/route-controller-manager-7b5bc7c588-fjgvs container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.65:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 30 00:13:10 crc kubenswrapper[4885]: I0130 00:13:10.546893 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs" podUID="f16f8def-9d4b-4f2f-953a-4e06e44a81df" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.65:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 30 00:13:10 crc kubenswrapper[4885]: I0130 00:13:10.695197 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 30 00:13:10 crc kubenswrapper[4885]: I0130 00:13:10.740449 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 30 00:13:10 crc kubenswrapper[4885]: I0130 00:13:10.798636 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 30 00:13:10 crc kubenswrapper[4885]: I0130 00:13:10.806661 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 30 00:13:10 crc kubenswrapper[4885]: I0130 00:13:10.910508 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 30 00:13:10 crc kubenswrapper[4885]: I0130 00:13:10.973579 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 30 00:13:11 crc kubenswrapper[4885]: I0130 00:13:11.129569 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 30 00:13:11 crc 
Jan 30 00:13:11 crc kubenswrapper[4885]: I0130 00:13:11.172183 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Jan 30 00:13:11 crc kubenswrapper[4885]: I0130 00:13:11.181320 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Jan 30 00:13:11 crc kubenswrapper[4885]: I0130 00:13:11.298588 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Jan 30 00:13:11 crc kubenswrapper[4885]: I0130 00:13:11.346731 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt"
Jan 30 00:13:11 crc kubenswrapper[4885]: I0130 00:13:11.387468 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Jan 30 00:13:11 crc kubenswrapper[4885]: I0130 00:13:11.433348 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 30 00:13:11 crc kubenswrapper[4885]: I0130 00:13:11.442203 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 30 00:13:11 crc kubenswrapper[4885]: I0130 00:13:11.499726 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Jan 30 00:13:11 crc kubenswrapper[4885]: I0130 00:13:11.661375 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Jan 30 00:13:11 crc kubenswrapper[4885]: I0130 00:13:11.729145 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Jan 30 00:13:11 crc kubenswrapper[4885]: I0130 00:13:11.815168 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Jan 30 00:13:11 crc kubenswrapper[4885]: I0130 00:13:11.864287 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Jan 30 00:13:11 crc kubenswrapper[4885]: I0130 00:13:11.900012 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Jan 30 00:13:11 crc kubenswrapper[4885]: I0130 00:13:11.908566 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Jan 30 00:13:11 crc kubenswrapper[4885]: I0130 00:13:11.936644 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Jan 30 00:13:12 crc kubenswrapper[4885]: I0130 00:13:12.008634 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Jan 30 00:13:12 crc kubenswrapper[4885]: I0130 00:13:12.058449 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Jan 30 00:13:12 crc kubenswrapper[4885]: I0130 00:13:12.085874 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 30 00:13:12 crc kubenswrapper[4885]: I0130 00:13:12.131860 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 30 00:13:12 crc kubenswrapper[4885]: I0130 00:13:12.183617 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 30 00:13:12 crc kubenswrapper[4885]: I0130 00:13:12.281858 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 30 00:13:12 crc kubenswrapper[4885]: I0130 00:13:12.528426 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 30 00:13:12 crc kubenswrapper[4885]: I0130 00:13:12.563890 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 30 00:13:12 crc kubenswrapper[4885]: I0130 00:13:12.622652 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 30 00:13:12 crc kubenswrapper[4885]: I0130 00:13:12.640198 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 30 00:13:12 crc kubenswrapper[4885]: I0130 00:13:12.676686 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 30 00:13:12 crc kubenswrapper[4885]: I0130 00:13:12.721359 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 30 00:13:12 crc kubenswrapper[4885]: I0130 00:13:12.723723 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 30 00:13:12 crc kubenswrapper[4885]: I0130 00:13:12.768736 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 30 00:13:12 crc kubenswrapper[4885]: I0130 00:13:12.805152 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 30 00:13:12 crc kubenswrapper[4885]: I0130 00:13:12.828913 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 30 00:13:12 crc kubenswrapper[4885]: I0130 00:13:12.867481 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 30 00:13:12 crc kubenswrapper[4885]: I0130 00:13:12.918153 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 30 00:13:12 crc kubenswrapper[4885]: I0130 00:13:12.945390 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 30 00:13:13 crc kubenswrapper[4885]: I0130 00:13:13.199037 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 30 00:13:13 crc kubenswrapper[4885]: I0130 00:13:13.213085 4885 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 30 00:13:13 crc kubenswrapper[4885]: I0130 00:13:13.236643 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 30 00:13:13 crc kubenswrapper[4885]: I0130 00:13:13.274974 4885 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 30 00:13:13 crc kubenswrapper[4885]: I0130 00:13:13.275391 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://292fbf570f2d15686dc8639a3219da69661cb031b42f02cc3ded5948312459a8" gracePeriod=5 Jan 30 00:13:13 crc kubenswrapper[4885]: I0130 00:13:13.291235 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 30 00:13:13 crc kubenswrapper[4885]: I0130 00:13:13.357369 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 30 00:13:13 crc kubenswrapper[4885]: I0130 00:13:13.404941 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 30 00:13:13 crc kubenswrapper[4885]: I0130 00:13:13.451800 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 30 00:13:13 crc kubenswrapper[4885]: I0130 00:13:13.455639 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 30 00:13:13 crc kubenswrapper[4885]: I0130 00:13:13.555151 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 30 00:13:13 crc kubenswrapper[4885]: I0130 00:13:13.603506 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 30 00:13:13 crc kubenswrapper[4885]: I0130 00:13:13.894828 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 30 00:13:14 crc kubenswrapper[4885]: I0130 00:13:14.052556 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 30 00:13:14 crc kubenswrapper[4885]: I0130 00:13:14.161297 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 30 00:13:14 crc kubenswrapper[4885]: I0130 00:13:14.217878 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 30 00:13:14 crc kubenswrapper[4885]: I0130 00:13:14.230655 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 30 00:13:14 crc kubenswrapper[4885]: I0130 00:13:14.335341 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 30 00:13:14 crc kubenswrapper[4885]: I0130 00:13:14.400146 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 30 00:13:14 crc kubenswrapper[4885]: I0130 00:13:14.441668 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-route-controller-manager/route-controller-manager-7b5bc7c588-fjgvs" Jan 30 00:13:14 crc kubenswrapper[4885]: I0130 00:13:14.489178 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 30 00:13:14 crc kubenswrapper[4885]: I0130 00:13:14.489264 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bw6zc"] Jan 30 00:13:14 crc kubenswrapper[4885]: I0130 00:13:14.489570 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-bw6zc" podUID="9af08248-ac20-4708-8753-bd2d97ad46a6" containerName="registry-server" containerID="cri-o://ce3ec2641c6301928f85fd726707acdd68d2bbc20fd995d8fc35ecb9aeb82a41" gracePeriod=2 Jan 30 00:13:14 crc kubenswrapper[4885]: I0130 00:13:14.526479 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 30 00:13:14 crc kubenswrapper[4885]: E0130 00:13:14.637175 4885 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ce3ec2641c6301928f85fd726707acdd68d2bbc20fd995d8fc35ecb9aeb82a41 is running failed: container process not found" containerID="ce3ec2641c6301928f85fd726707acdd68d2bbc20fd995d8fc35ecb9aeb82a41" cmd=["grpc_health_probe","-addr=:50051"] Jan 30 00:13:14 crc kubenswrapper[4885]: E0130 00:13:14.637678 4885 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ce3ec2641c6301928f85fd726707acdd68d2bbc20fd995d8fc35ecb9aeb82a41 is running failed: container process not found" containerID="ce3ec2641c6301928f85fd726707acdd68d2bbc20fd995d8fc35ecb9aeb82a41" cmd=["grpc_health_probe","-addr=:50051"] Jan 30 00:13:14 crc kubenswrapper[4885]: E0130 00:13:14.638128 4885 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ce3ec2641c6301928f85fd726707acdd68d2bbc20fd995d8fc35ecb9aeb82a41 is running failed: container process not found" containerID="ce3ec2641c6301928f85fd726707acdd68d2bbc20fd995d8fc35ecb9aeb82a41" cmd=["grpc_health_probe","-addr=:50051"] Jan 30 00:13:14 crc kubenswrapper[4885]: E0130 00:13:14.638220 4885 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ce3ec2641c6301928f85fd726707acdd68d2bbc20fd995d8fc35ecb9aeb82a41 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-bw6zc" podUID="9af08248-ac20-4708-8753-bd2d97ad46a6" containerName="registry-server" Jan 30 00:13:14 crc kubenswrapper[4885]: I0130 00:13:14.674096 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 30 00:13:14 crc kubenswrapper[4885]: I0130 00:13:14.807351 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 30 00:13:14 crc kubenswrapper[4885]: I0130 00:13:14.807592 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 30 00:13:14 crc kubenswrapper[4885]: I0130 00:13:14.897079 4885 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 30 00:13:14 crc kubenswrapper[4885]: I0130 00:13:14.922255 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 30 00:13:14 crc kubenswrapper[4885]: I0130 00:13:14.926715 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bw6zc" Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.040222 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.061731 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.097658 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktsg6\" (UniqueName: \"kubernetes.io/projected/9af08248-ac20-4708-8753-bd2d97ad46a6-kube-api-access-ktsg6\") pod \"9af08248-ac20-4708-8753-bd2d97ad46a6\" (UID: \"9af08248-ac20-4708-8753-bd2d97ad46a6\") " Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.097729 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9af08248-ac20-4708-8753-bd2d97ad46a6-catalog-content\") pod \"9af08248-ac20-4708-8753-bd2d97ad46a6\" (UID: \"9af08248-ac20-4708-8753-bd2d97ad46a6\") " Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.097802 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9af08248-ac20-4708-8753-bd2d97ad46a6-utilities\") pod \"9af08248-ac20-4708-8753-bd2d97ad46a6\" (UID: \"9af08248-ac20-4708-8753-bd2d97ad46a6\") " Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.099270 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9af08248-ac20-4708-8753-bd2d97ad46a6-utilities" (OuterVolumeSpecName: "utilities") pod "9af08248-ac20-4708-8753-bd2d97ad46a6" (UID: "9af08248-ac20-4708-8753-bd2d97ad46a6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.105066 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9af08248-ac20-4708-8753-bd2d97ad46a6-kube-api-access-ktsg6" (OuterVolumeSpecName: "kube-api-access-ktsg6") pod "9af08248-ac20-4708-8753-bd2d97ad46a6" (UID: "9af08248-ac20-4708-8753-bd2d97ad46a6"). InnerVolumeSpecName "kube-api-access-ktsg6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.157448 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9af08248-ac20-4708-8753-bd2d97ad46a6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9af08248-ac20-4708-8753-bd2d97ad46a6" (UID: "9af08248-ac20-4708-8753-bd2d97ad46a6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.199458 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktsg6\" (UniqueName: \"kubernetes.io/projected/9af08248-ac20-4708-8753-bd2d97ad46a6-kube-api-access-ktsg6\") on node \"crc\" DevicePath \"\"" Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.199490 4885 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9af08248-ac20-4708-8753-bd2d97ad46a6-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.199500 4885 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9af08248-ac20-4708-8753-bd2d97ad46a6-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.303081 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.334571 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.584306 4885 generic.go:334] "Generic (PLEG): container finished" podID="9af08248-ac20-4708-8753-bd2d97ad46a6" containerID="ce3ec2641c6301928f85fd726707acdd68d2bbc20fd995d8fc35ecb9aeb82a41" exitCode=0 Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.584368 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bw6zc" Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.584373 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bw6zc" event={"ID":"9af08248-ac20-4708-8753-bd2d97ad46a6","Type":"ContainerDied","Data":"ce3ec2641c6301928f85fd726707acdd68d2bbc20fd995d8fc35ecb9aeb82a41"} Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.584417 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bw6zc" event={"ID":"9af08248-ac20-4708-8753-bd2d97ad46a6","Type":"ContainerDied","Data":"c9a58a5c56d508e715dc745c1120116951bb43b61342ea2c3b9af9f5e752c799"} Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.584441 4885 scope.go:117] "RemoveContainer" containerID="ce3ec2641c6301928f85fd726707acdd68d2bbc20fd995d8fc35ecb9aeb82a41" Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.611866 4885 scope.go:117] "RemoveContainer" containerID="67be97a003880e5218654afd7913d928cf7c0a23891c50a4d8185b3b4bc98fcc" Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.629176 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bw6zc"] Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.634814 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bw6zc"] Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.638106 4885 scope.go:117] "RemoveContainer" containerID="741d5b2dbf0d6c75668ad929340cb444c80cb99e1cc405f7ba299d007db3af8c" Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.665833 4885 scope.go:117] "RemoveContainer" containerID="ce3ec2641c6301928f85fd726707acdd68d2bbc20fd995d8fc35ecb9aeb82a41" Jan 30 00:13:15 crc kubenswrapper[4885]: E0130 00:13:15.666319 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not 
find container \"ce3ec2641c6301928f85fd726707acdd68d2bbc20fd995d8fc35ecb9aeb82a41\": container with ID starting with ce3ec2641c6301928f85fd726707acdd68d2bbc20fd995d8fc35ecb9aeb82a41 not found: ID does not exist" containerID="ce3ec2641c6301928f85fd726707acdd68d2bbc20fd995d8fc35ecb9aeb82a41" Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.666353 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce3ec2641c6301928f85fd726707acdd68d2bbc20fd995d8fc35ecb9aeb82a41"} err="failed to get container status \"ce3ec2641c6301928f85fd726707acdd68d2bbc20fd995d8fc35ecb9aeb82a41\": rpc error: code = NotFound desc = could not find container \"ce3ec2641c6301928f85fd726707acdd68d2bbc20fd995d8fc35ecb9aeb82a41\": container with ID starting with ce3ec2641c6301928f85fd726707acdd68d2bbc20fd995d8fc35ecb9aeb82a41 not found: ID does not exist" Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.666377 4885 scope.go:117] "RemoveContainer" containerID="67be97a003880e5218654afd7913d928cf7c0a23891c50a4d8185b3b4bc98fcc" Jan 30 00:13:15 crc kubenswrapper[4885]: E0130 00:13:15.666698 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67be97a003880e5218654afd7913d928cf7c0a23891c50a4d8185b3b4bc98fcc\": container with ID starting with 67be97a003880e5218654afd7913d928cf7c0a23891c50a4d8185b3b4bc98fcc not found: ID does not exist" containerID="67be97a003880e5218654afd7913d928cf7c0a23891c50a4d8185b3b4bc98fcc" Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.666718 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67be97a003880e5218654afd7913d928cf7c0a23891c50a4d8185b3b4bc98fcc"} err="failed to get container status \"67be97a003880e5218654afd7913d928cf7c0a23891c50a4d8185b3b4bc98fcc\": rpc error: code = NotFound desc = could not find container \"67be97a003880e5218654afd7913d928cf7c0a23891c50a4d8185b3b4bc98fcc\": container with ID starting with 67be97a003880e5218654afd7913d928cf7c0a23891c50a4d8185b3b4bc98fcc not found: ID does not exist" Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.666731 4885 scope.go:117] "RemoveContainer" containerID="741d5b2dbf0d6c75668ad929340cb444c80cb99e1cc405f7ba299d007db3af8c" Jan 30 00:13:15 crc kubenswrapper[4885]: E0130 00:13:15.667085 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"741d5b2dbf0d6c75668ad929340cb444c80cb99e1cc405f7ba299d007db3af8c\": container with ID starting with 741d5b2dbf0d6c75668ad929340cb444c80cb99e1cc405f7ba299d007db3af8c not found: ID does not exist" containerID="741d5b2dbf0d6c75668ad929340cb444c80cb99e1cc405f7ba299d007db3af8c" Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.667110 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"741d5b2dbf0d6c75668ad929340cb444c80cb99e1cc405f7ba299d007db3af8c"} err="failed to get container status \"741d5b2dbf0d6c75668ad929340cb444c80cb99e1cc405f7ba299d007db3af8c\": rpc error: code = NotFound desc = could not find container \"741d5b2dbf0d6c75668ad929340cb444c80cb99e1cc405f7ba299d007db3af8c\": container with ID starting with 741d5b2dbf0d6c75668ad929340cb444c80cb99e1cc405f7ba299d007db3af8c not found: ID does not exist" Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.727146 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 30 00:13:15 crc 
Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.748415 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Jan 30 00:13:15 crc kubenswrapper[4885]: I0130 00:13:15.753186 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Jan 30 00:13:16 crc kubenswrapper[4885]: I0130 00:13:16.024124 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Jan 30 00:13:16 crc kubenswrapper[4885]: I0130 00:13:16.032071 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Jan 30 00:13:16 crc kubenswrapper[4885]: I0130 00:13:16.156763 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9af08248-ac20-4708-8753-bd2d97ad46a6" path="/var/lib/kubelet/pods/9af08248-ac20-4708-8753-bd2d97ad46a6/volumes"
Jan 30 00:13:18 crc kubenswrapper[4885]: I0130 00:13:18.613987 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Jan 30 00:13:18 crc kubenswrapper[4885]: I0130 00:13:18.614355 4885 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="292fbf570f2d15686dc8639a3219da69661cb031b42f02cc3ded5948312459a8" exitCode=137
Jan 30 00:13:18 crc kubenswrapper[4885]: I0130 00:13:18.849747 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 00:13:18 crc kubenswrapper[4885]: I0130 00:13:18.976499 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 30 00:13:18 crc kubenswrapper[4885]: I0130 00:13:18.976545 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 30 00:13:18 crc kubenswrapper[4885]: I0130 00:13:18.976575 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 30 00:13:18 crc kubenswrapper[4885]: I0130 00:13:18.976604 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 30 00:13:18 crc kubenswrapper[4885]: I0130 00:13:18.976628 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 30 00:13:18 crc kubenswrapper[4885]: I0130 00:13:18.976693 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:13:18 crc kubenswrapper[4885]: I0130 00:13:18.976720 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:13:18 crc kubenswrapper[4885]: I0130 00:13:18.976758 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:13:18 crc kubenswrapper[4885]: I0130 00:13:18.976787 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:13:18 crc kubenswrapper[4885]: I0130 00:13:18.977004 4885 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 30 00:13:18 crc kubenswrapper[4885]: I0130 00:13:18.977027 4885 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Jan 30 00:13:18 crc kubenswrapper[4885]: I0130 00:13:18.977042 4885 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Jan 30 00:13:18 crc kubenswrapper[4885]: I0130 00:13:18.977054 4885 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Jan 30 00:13:18 crc kubenswrapper[4885]: I0130 00:13:18.989219 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:13:19 crc kubenswrapper[4885]: I0130 00:13:19.078358 4885 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 30 00:13:19 crc kubenswrapper[4885]: I0130 00:13:19.622118 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 30 00:13:19 crc kubenswrapper[4885]: I0130 00:13:19.622490 4885 scope.go:117] "RemoveContainer" containerID="292fbf570f2d15686dc8639a3219da69661cb031b42f02cc3ded5948312459a8" Jan 30 00:13:19 crc kubenswrapper[4885]: I0130 00:13:19.622578 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 30 00:13:20 crc kubenswrapper[4885]: I0130 00:13:20.148712 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.605688 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-f77784dbc-d4wjb"] Jan 30 00:13:21 crc kubenswrapper[4885]: E0130 00:13:21.606151 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9af08248-ac20-4708-8753-bd2d97ad46a6" containerName="registry-server" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.606175 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="9af08248-ac20-4708-8753-bd2d97ad46a6" containerName="registry-server" Jan 30 00:13:21 crc kubenswrapper[4885]: E0130 00:13:21.606200 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.606213 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 30 00:13:21 crc kubenswrapper[4885]: E0130 00:13:21.606237 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9af08248-ac20-4708-8753-bd2d97ad46a6" containerName="extract-content" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.606250 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="9af08248-ac20-4708-8753-bd2d97ad46a6" containerName="extract-content" Jan 30 00:13:21 crc kubenswrapper[4885]: E0130 00:13:21.606270 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9af08248-ac20-4708-8753-bd2d97ad46a6" containerName="extract-utilities" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.606283 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="9af08248-ac20-4708-8753-bd2d97ad46a6" containerName="extract-utilities" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.606455 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.606477 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="9af08248-ac20-4708-8753-bd2d97ad46a6" containerName="registry-server" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.607236 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.609857 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.610272 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.611743 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.612409 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.612710 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.612857 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.613431 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.613564 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.614032 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.614163 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.614551 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.614684 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.618139 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.618216 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.618256 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.618290 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-system-cliconfig\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.618363 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-system-router-certs\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.618391 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/616a67fc-1236-4c55-a573-efc3ab5d7adf-audit-dir\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.618463 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-system-service-ca\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.618491 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmc7c\" (UniqueName: \"kubernetes.io/projected/616a67fc-1236-4c55-a573-efc3ab5d7adf-kube-api-access-wmc7c\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.618523 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-system-session\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.618557 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-system-serving-cert\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.618593 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-user-template-error\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.618627 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.618754 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-user-template-login\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.618875 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/616a67fc-1236-4c55-a573-efc3ab5d7adf-audit-policies\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.621590 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-f77784dbc-d4wjb"] Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.631854 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.631935 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.644170 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.720085 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.720176 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.720222 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" 
(UniqueName: \"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.720252 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-system-cliconfig\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.720293 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-system-router-certs\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.720331 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/616a67fc-1236-4c55-a573-efc3ab5d7adf-audit-dir\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.720364 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-system-service-ca\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.720396 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmc7c\" (UniqueName: \"kubernetes.io/projected/616a67fc-1236-4c55-a573-efc3ab5d7adf-kube-api-access-wmc7c\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.720421 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-system-session\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.720652 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-system-serving-cert\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.720684 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-user-template-error\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.720717 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.720753 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-user-template-login\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.720823 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/616a67fc-1236-4c55-a573-efc3ab5d7adf-audit-policies\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.721066 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/616a67fc-1236-4c55-a573-efc3ab5d7adf-audit-dir\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.721794 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/616a67fc-1236-4c55-a573-efc3ab5d7adf-audit-policies\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.722532 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-system-service-ca\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.723078 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-system-cliconfig\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.723840 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: 
\"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.725607 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.726442 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-system-router-certs\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.727875 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.727881 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.729117 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-system-serving-cert\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.729378 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-system-session\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.730411 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-user-template-login\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.731408 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/616a67fc-1236-4c55-a573-efc3ab5d7adf-v4-0-config-user-template-error\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " 
pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.748652 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmc7c\" (UniqueName: \"kubernetes.io/projected/616a67fc-1236-4c55-a573-efc3ab5d7adf-kube-api-access-wmc7c\") pod \"oauth-openshift-f77784dbc-d4wjb\" (UID: \"616a67fc-1236-4c55-a573-efc3ab5d7adf\") " pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:21 crc kubenswrapper[4885]: I0130 00:13:21.939504 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:22 crc kubenswrapper[4885]: I0130 00:13:22.204359 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-f77784dbc-d4wjb"] Jan 30 00:13:22 crc kubenswrapper[4885]: W0130 00:13:22.212261 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod616a67fc_1236_4c55_a573_efc3ab5d7adf.slice/crio-b57b1afe9485d6a241c38da2879b4015faacb715b44a48578755655c149efeb4 WatchSource:0}: Error finding container b57b1afe9485d6a241c38da2879b4015faacb715b44a48578755655c149efeb4: Status 404 returned error can't find the container with id b57b1afe9485d6a241c38da2879b4015faacb715b44a48578755655c149efeb4 Jan 30 00:13:22 crc kubenswrapper[4885]: I0130 00:13:22.641981 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" event={"ID":"616a67fc-1236-4c55-a573-efc3ab5d7adf","Type":"ContainerStarted","Data":"fa6432711dcd0e40512df095441b31c6e982dc5f5e601e65253d0328edeb82bc"} Jan 30 00:13:22 crc kubenswrapper[4885]: I0130 00:13:22.642348 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:22 crc kubenswrapper[4885]: I0130 00:13:22.642371 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" event={"ID":"616a67fc-1236-4c55-a573-efc3ab5d7adf","Type":"ContainerStarted","Data":"b57b1afe9485d6a241c38da2879b4015faacb715b44a48578755655c149efeb4"} Jan 30 00:13:22 crc kubenswrapper[4885]: I0130 00:13:22.675323 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" podStartSLOduration=78.675305937 podStartE2EDuration="1m18.675305937s" podCreationTimestamp="2026-01-30 00:12:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:13:22.670983872 +0000 UTC m=+289.262455630" watchObservedRunningTime="2026-01-30 00:13:22.675305937 +0000 UTC m=+289.266777685" Jan 30 00:13:22 crc kubenswrapper[4885]: I0130 00:13:22.871511 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-f77784dbc-d4wjb" Jan 30 00:13:30 crc kubenswrapper[4885]: I0130 00:13:30.483836 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 30 00:13:31 crc kubenswrapper[4885]: I0130 00:13:31.617743 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 30 00:13:31 crc kubenswrapper[4885]: I0130 00:13:31.729242 4885 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-image-registry"/"installation-pull-secrets" Jan 30 00:13:32 crc kubenswrapper[4885]: I0130 00:13:32.149254 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 30 00:13:33 crc kubenswrapper[4885]: I0130 00:13:33.580744 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 30 00:13:33 crc kubenswrapper[4885]: I0130 00:13:33.720034 4885 generic.go:334] "Generic (PLEG): container finished" podID="a345d711-84e3-47c8-a255-f833dfaca7fa" containerID="6032deeddc43ceab6510c33b1dbef7ad2723f0954390a032a43a52c0192f1a8c" exitCode=0 Jan 30 00:13:33 crc kubenswrapper[4885]: I0130 00:13:33.720109 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" event={"ID":"a345d711-84e3-47c8-a255-f833dfaca7fa","Type":"ContainerDied","Data":"6032deeddc43ceab6510c33b1dbef7ad2723f0954390a032a43a52c0192f1a8c"} Jan 30 00:13:33 crc kubenswrapper[4885]: I0130 00:13:33.721032 4885 scope.go:117] "RemoveContainer" containerID="6032deeddc43ceab6510c33b1dbef7ad2723f0954390a032a43a52c0192f1a8c" Jan 30 00:13:33 crc kubenswrapper[4885]: I0130 00:13:33.751394 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 30 00:13:33 crc kubenswrapper[4885]: I0130 00:13:33.906368 4885 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Jan 30 00:13:34 crc kubenswrapper[4885]: I0130 00:13:34.292848 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 30 00:13:34 crc kubenswrapper[4885]: I0130 00:13:34.534613 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 30 00:13:34 crc kubenswrapper[4885]: I0130 00:13:34.729589 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" event={"ID":"a345d711-84e3-47c8-a255-f833dfaca7fa","Type":"ContainerStarted","Data":"fd6f7c890fc709d1ae8cd5e56ce35a935df28b010154600842885a496ae3ae15"} Jan 30 00:13:34 crc kubenswrapper[4885]: I0130 00:13:34.730383 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" Jan 30 00:13:34 crc kubenswrapper[4885]: I0130 00:13:34.734018 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" Jan 30 00:13:35 crc kubenswrapper[4885]: I0130 00:13:35.028405 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 30 00:13:37 crc kubenswrapper[4885]: I0130 00:13:37.613946 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 30 00:13:38 crc kubenswrapper[4885]: I0130 00:13:38.186285 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 30 00:13:39 crc kubenswrapper[4885]: I0130 00:13:39.688451 4885 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 30 00:13:43 crc kubenswrapper[4885]: I0130 00:13:43.254631 4885 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 30 00:13:46 crc kubenswrapper[4885]: I0130 00:13:46.581564 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 30 00:13:47 crc kubenswrapper[4885]: I0130 00:13:47.143493 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 30 00:13:54 crc kubenswrapper[4885]: I0130 00:13:54.156761 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 30 00:14:14 crc kubenswrapper[4885]: I0130 00:14:14.112277 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wng4c"] Jan 30 00:14:14 crc kubenswrapper[4885]: I0130 00:14:14.114438 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-wng4c" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" containerName="registry-server" containerID="cri-o://ef9b2b3136c4247dc8ba7f1e38251bea3ae0b10a25b23c7aeb884460114af0f9" gracePeriod=2 Jan 30 00:14:14 crc kubenswrapper[4885]: I0130 00:14:14.564223 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wng4c" Jan 30 00:14:14 crc kubenswrapper[4885]: I0130 00:14:14.721972 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vzbr\" (UniqueName: \"kubernetes.io/projected/7f0d9e5f-67f9-4f87-8546-8e12d68513e9-kube-api-access-9vzbr\") pod \"7f0d9e5f-67f9-4f87-8546-8e12d68513e9\" (UID: \"7f0d9e5f-67f9-4f87-8546-8e12d68513e9\") " Jan 30 00:14:14 crc kubenswrapper[4885]: I0130 00:14:14.722067 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f0d9e5f-67f9-4f87-8546-8e12d68513e9-utilities\") pod \"7f0d9e5f-67f9-4f87-8546-8e12d68513e9\" (UID: \"7f0d9e5f-67f9-4f87-8546-8e12d68513e9\") " Jan 30 00:14:14 crc kubenswrapper[4885]: I0130 00:14:14.722107 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f0d9e5f-67f9-4f87-8546-8e12d68513e9-catalog-content\") pod \"7f0d9e5f-67f9-4f87-8546-8e12d68513e9\" (UID: \"7f0d9e5f-67f9-4f87-8546-8e12d68513e9\") " Jan 30 00:14:14 crc kubenswrapper[4885]: I0130 00:14:14.723571 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f0d9e5f-67f9-4f87-8546-8e12d68513e9-utilities" (OuterVolumeSpecName: "utilities") pod "7f0d9e5f-67f9-4f87-8546-8e12d68513e9" (UID: "7f0d9e5f-67f9-4f87-8546-8e12d68513e9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:14:14 crc kubenswrapper[4885]: I0130 00:14:14.735061 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f0d9e5f-67f9-4f87-8546-8e12d68513e9-kube-api-access-9vzbr" (OuterVolumeSpecName: "kube-api-access-9vzbr") pod "7f0d9e5f-67f9-4f87-8546-8e12d68513e9" (UID: "7f0d9e5f-67f9-4f87-8546-8e12d68513e9"). InnerVolumeSpecName "kube-api-access-9vzbr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:14:14 crc kubenswrapper[4885]: I0130 00:14:14.824087 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vzbr\" (UniqueName: \"kubernetes.io/projected/7f0d9e5f-67f9-4f87-8546-8e12d68513e9-kube-api-access-9vzbr\") on node \"crc\" DevicePath \"\"" Jan 30 00:14:14 crc kubenswrapper[4885]: I0130 00:14:14.824167 4885 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f0d9e5f-67f9-4f87-8546-8e12d68513e9-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 00:14:14 crc kubenswrapper[4885]: I0130 00:14:14.854136 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f0d9e5f-67f9-4f87-8546-8e12d68513e9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7f0d9e5f-67f9-4f87-8546-8e12d68513e9" (UID: "7f0d9e5f-67f9-4f87-8546-8e12d68513e9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:14:14 crc kubenswrapper[4885]: I0130 00:14:14.925177 4885 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f0d9e5f-67f9-4f87-8546-8e12d68513e9-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 00:14:15 crc kubenswrapper[4885]: I0130 00:14:15.008100 4885 generic.go:334] "Generic (PLEG): container finished" podID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" containerID="ef9b2b3136c4247dc8ba7f1e38251bea3ae0b10a25b23c7aeb884460114af0f9" exitCode=0 Jan 30 00:14:15 crc kubenswrapper[4885]: I0130 00:14:15.008174 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wng4c" event={"ID":"7f0d9e5f-67f9-4f87-8546-8e12d68513e9","Type":"ContainerDied","Data":"ef9b2b3136c4247dc8ba7f1e38251bea3ae0b10a25b23c7aeb884460114af0f9"} Jan 30 00:14:15 crc kubenswrapper[4885]: I0130 00:14:15.008211 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wng4c" Jan 30 00:14:15 crc kubenswrapper[4885]: I0130 00:14:15.008242 4885 scope.go:117] "RemoveContainer" containerID="ef9b2b3136c4247dc8ba7f1e38251bea3ae0b10a25b23c7aeb884460114af0f9" Jan 30 00:14:15 crc kubenswrapper[4885]: I0130 00:14:15.008223 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wng4c" event={"ID":"7f0d9e5f-67f9-4f87-8546-8e12d68513e9","Type":"ContainerDied","Data":"b5ea85e4fea80ed8d1711329022d891ae49e96be0d3373e5c95ed4048a7712ff"} Jan 30 00:14:15 crc kubenswrapper[4885]: I0130 00:14:15.037924 4885 scope.go:117] "RemoveContainer" containerID="b9e50f0787cc89dd40de252c4499c95101909ab0a59e2b4dd4b160921bb60b88" Jan 30 00:14:15 crc kubenswrapper[4885]: I0130 00:14:15.059436 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wng4c"] Jan 30 00:14:15 crc kubenswrapper[4885]: I0130 00:14:15.068640 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-wng4c"] Jan 30 00:14:15 crc kubenswrapper[4885]: I0130 00:14:15.074425 4885 scope.go:117] "RemoveContainer" containerID="45d7aa30895933135f6344e5c44a682b3007fba757c068721b190877a7563140" Jan 30 00:14:15 crc kubenswrapper[4885]: I0130 00:14:15.117883 4885 scope.go:117] "RemoveContainer" containerID="ef9b2b3136c4247dc8ba7f1e38251bea3ae0b10a25b23c7aeb884460114af0f9" Jan 30 00:14:15 crc kubenswrapper[4885]: E0130 00:14:15.118960 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef9b2b3136c4247dc8ba7f1e38251bea3ae0b10a25b23c7aeb884460114af0f9\": container with ID starting with ef9b2b3136c4247dc8ba7f1e38251bea3ae0b10a25b23c7aeb884460114af0f9 not found: ID does not exist" containerID="ef9b2b3136c4247dc8ba7f1e38251bea3ae0b10a25b23c7aeb884460114af0f9" Jan 30 00:14:15 crc kubenswrapper[4885]: I0130 00:14:15.119002 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef9b2b3136c4247dc8ba7f1e38251bea3ae0b10a25b23c7aeb884460114af0f9"} err="failed to get container status \"ef9b2b3136c4247dc8ba7f1e38251bea3ae0b10a25b23c7aeb884460114af0f9\": rpc error: code = NotFound desc = could not find container \"ef9b2b3136c4247dc8ba7f1e38251bea3ae0b10a25b23c7aeb884460114af0f9\": container with ID starting with ef9b2b3136c4247dc8ba7f1e38251bea3ae0b10a25b23c7aeb884460114af0f9 not found: ID does not exist" Jan 30 00:14:15 crc kubenswrapper[4885]: I0130 00:14:15.119033 4885 scope.go:117] "RemoveContainer" containerID="b9e50f0787cc89dd40de252c4499c95101909ab0a59e2b4dd4b160921bb60b88" Jan 30 00:14:15 crc kubenswrapper[4885]: E0130 00:14:15.119859 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9e50f0787cc89dd40de252c4499c95101909ab0a59e2b4dd4b160921bb60b88\": container with ID starting with b9e50f0787cc89dd40de252c4499c95101909ab0a59e2b4dd4b160921bb60b88 not found: ID does not exist" containerID="b9e50f0787cc89dd40de252c4499c95101909ab0a59e2b4dd4b160921bb60b88" Jan 30 00:14:15 crc kubenswrapper[4885]: I0130 00:14:15.119957 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9e50f0787cc89dd40de252c4499c95101909ab0a59e2b4dd4b160921bb60b88"} err="failed to get container status \"b9e50f0787cc89dd40de252c4499c95101909ab0a59e2b4dd4b160921bb60b88\": rpc error: code = NotFound desc = could not find container 
\"b9e50f0787cc89dd40de252c4499c95101909ab0a59e2b4dd4b160921bb60b88\": container with ID starting with b9e50f0787cc89dd40de252c4499c95101909ab0a59e2b4dd4b160921bb60b88 not found: ID does not exist" Jan 30 00:14:15 crc kubenswrapper[4885]: I0130 00:14:15.120030 4885 scope.go:117] "RemoveContainer" containerID="45d7aa30895933135f6344e5c44a682b3007fba757c068721b190877a7563140" Jan 30 00:14:15 crc kubenswrapper[4885]: E0130 00:14:15.120611 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45d7aa30895933135f6344e5c44a682b3007fba757c068721b190877a7563140\": container with ID starting with 45d7aa30895933135f6344e5c44a682b3007fba757c068721b190877a7563140 not found: ID does not exist" containerID="45d7aa30895933135f6344e5c44a682b3007fba757c068721b190877a7563140" Jan 30 00:14:15 crc kubenswrapper[4885]: I0130 00:14:15.120650 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45d7aa30895933135f6344e5c44a682b3007fba757c068721b190877a7563140"} err="failed to get container status \"45d7aa30895933135f6344e5c44a682b3007fba757c068721b190877a7563140\": rpc error: code = NotFound desc = could not find container \"45d7aa30895933135f6344e5c44a682b3007fba757c068721b190877a7563140\": container with ID starting with 45d7aa30895933135f6344e5c44a682b3007fba757c068721b190877a7563140 not found: ID does not exist" Jan 30 00:14:16 crc kubenswrapper[4885]: I0130 00:14:16.155856 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" path="/var/lib/kubelet/pods/7f0d9e5f-67f9-4f87-8546-8e12d68513e9/volumes" Jan 30 00:14:30 crc kubenswrapper[4885]: I0130 00:14:30.143811 4885 patch_prober.go:28] interesting pod/machine-config-daemon-bmd5j container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 00:14:30 crc kubenswrapper[4885]: I0130 00:14:30.144791 4885 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.398624 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-fsw8p"] Jan 30 00:14:44 crc kubenswrapper[4885]: E0130 00:14:44.399759 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" containerName="extract-content" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.399844 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" containerName="extract-content" Jan 30 00:14:44 crc kubenswrapper[4885]: E0130 00:14:44.399972 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" containerName="extract-utilities" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.399991 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" containerName="extract-utilities" Jan 30 00:14:44 crc kubenswrapper[4885]: E0130 00:14:44.400015 4885 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" containerName="registry-server" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.400026 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" containerName="registry-server" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.400210 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f0d9e5f-67f9-4f87-8546-8e12d68513e9" containerName="registry-server" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.401621 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.424728 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-fsw8p"] Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.460453 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9bbf200b-8793-4f08-be26-9788bb261da6-bound-sa-token\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.460841 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.460916 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9bbf200b-8793-4f08-be26-9788bb261da6-trusted-ca\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.460961 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9bbf200b-8793-4f08-be26-9788bb261da6-registry-certificates\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.461007 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9wnd\" (UniqueName: \"kubernetes.io/projected/9bbf200b-8793-4f08-be26-9788bb261da6-kube-api-access-z9wnd\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.461057 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9bbf200b-8793-4f08-be26-9788bb261da6-ca-trust-extracted\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.461090 4885 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9bbf200b-8793-4f08-be26-9788bb261da6-registry-tls\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.461119 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9bbf200b-8793-4f08-be26-9788bb261da6-installation-pull-secrets\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.489173 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.562449 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9bbf200b-8793-4f08-be26-9788bb261da6-trusted-ca\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.562513 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9bbf200b-8793-4f08-be26-9788bb261da6-registry-certificates\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.562554 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9wnd\" (UniqueName: \"kubernetes.io/projected/9bbf200b-8793-4f08-be26-9788bb261da6-kube-api-access-z9wnd\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.562592 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9bbf200b-8793-4f08-be26-9788bb261da6-ca-trust-extracted\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.562658 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9bbf200b-8793-4f08-be26-9788bb261da6-registry-tls\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.562693 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: 
\"kubernetes.io/secret/9bbf200b-8793-4f08-be26-9788bb261da6-installation-pull-secrets\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.562723 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9bbf200b-8793-4f08-be26-9788bb261da6-bound-sa-token\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.563730 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9bbf200b-8793-4f08-be26-9788bb261da6-ca-trust-extracted\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.564027 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9bbf200b-8793-4f08-be26-9788bb261da6-trusted-ca\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.564479 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9bbf200b-8793-4f08-be26-9788bb261da6-registry-certificates\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.572348 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9bbf200b-8793-4f08-be26-9788bb261da6-installation-pull-secrets\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.572942 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9bbf200b-8793-4f08-be26-9788bb261da6-registry-tls\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.585502 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9wnd\" (UniqueName: \"kubernetes.io/projected/9bbf200b-8793-4f08-be26-9788bb261da6-kube-api-access-z9wnd\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: I0130 00:14:44.597860 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9bbf200b-8793-4f08-be26-9788bb261da6-bound-sa-token\") pod \"image-registry-66df7c8f76-fsw8p\" (UID: \"9bbf200b-8793-4f08-be26-9788bb261da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:44 crc kubenswrapper[4885]: 
I0130 00:14:44.727168 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:45 crc kubenswrapper[4885]: I0130 00:14:45.205326 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-fsw8p"] Jan 30 00:14:45 crc kubenswrapper[4885]: W0130 00:14:45.217842 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9bbf200b_8793_4f08_be26_9788bb261da6.slice/crio-6d704b488aaa14da1cc660e7c96abd246048a1b50c36f4f54325fa13e82f5291 WatchSource:0}: Error finding container 6d704b488aaa14da1cc660e7c96abd246048a1b50c36f4f54325fa13e82f5291: Status 404 returned error can't find the container with id 6d704b488aaa14da1cc660e7c96abd246048a1b50c36f4f54325fa13e82f5291 Jan 30 00:14:46 crc kubenswrapper[4885]: I0130 00:14:46.215533 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" event={"ID":"9bbf200b-8793-4f08-be26-9788bb261da6","Type":"ContainerStarted","Data":"b928a0d5e246b6d419d4d5f12c6c38641748f08d06d405a8dca2675359ee3a18"} Jan 30 00:14:46 crc kubenswrapper[4885]: I0130 00:14:46.216018 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:14:46 crc kubenswrapper[4885]: I0130 00:14:46.216043 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" event={"ID":"9bbf200b-8793-4f08-be26-9788bb261da6","Type":"ContainerStarted","Data":"6d704b488aaa14da1cc660e7c96abd246048a1b50c36f4f54325fa13e82f5291"} Jan 30 00:14:46 crc kubenswrapper[4885]: I0130 00:14:46.242103 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" podStartSLOduration=2.242073442 podStartE2EDuration="2.242073442s" podCreationTimestamp="2026-01-30 00:14:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:14:46.234958838 +0000 UTC m=+372.826430676" watchObservedRunningTime="2026-01-30 00:14:46.242073442 +0000 UTC m=+372.833545220" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.195579 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-v5294"] Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.196589 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-v5294" podUID="c1ac97f9-b076-40c9-80fc-a2f6111d313b" containerName="registry-server" containerID="cri-o://c47d747c5673776e931481c1618a4d1f2dc5eae57c50d9bebdaf59428ed93c6f" gracePeriod=30 Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.212741 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7fh97"] Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.213270 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7fh97" podUID="fdd3cba5-cf61-40cd-8c88-d289887fbf8a" containerName="registry-server" containerID="cri-o://3cdbdd6f61793c9de0250b35733d8beb128471927b6d2996288d415e316391af" gracePeriod=30 Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.231933 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/marketplace-operator-79b997595-mfxlj"] Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.232345 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" podUID="a345d711-84e3-47c8-a255-f833dfaca7fa" containerName="marketplace-operator" containerID="cri-o://fd6f7c890fc709d1ae8cd5e56ce35a935df28b010154600842885a496ae3ae15" gracePeriod=30 Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.249254 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mfdx7"] Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.249545 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mfdx7" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" containerName="registry-server" containerID="cri-o://f2bfaf33f740563349e88f1a29d1fc1712be88fbbf13ad880c023a1f43fa7560" gracePeriod=30 Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.265684 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8628w"] Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.266652 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8628w" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.274484 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gd928"] Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.274880 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gd928" podUID="faedfaad-6883-471e-9a4e-d15cc6b969d7" containerName="registry-server" containerID="cri-o://cbbbab32edb42b7f316c1a84e4a8aa19ff6c768177f1bbdc06371a2d9b9798c8" gracePeriod=30 Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.277011 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8628w"] Jan 30 00:14:54 crc kubenswrapper[4885]: E0130 00:14:54.305591 4885 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c47d747c5673776e931481c1618a4d1f2dc5eae57c50d9bebdaf59428ed93c6f is running failed: container process not found" containerID="c47d747c5673776e931481c1618a4d1f2dc5eae57c50d9bebdaf59428ed93c6f" cmd=["grpc_health_probe","-addr=:50051"] Jan 30 00:14:54 crc kubenswrapper[4885]: E0130 00:14:54.306533 4885 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c47d747c5673776e931481c1618a4d1f2dc5eae57c50d9bebdaf59428ed93c6f is running failed: container process not found" containerID="c47d747c5673776e931481c1618a4d1f2dc5eae57c50d9bebdaf59428ed93c6f" cmd=["grpc_health_probe","-addr=:50051"] Jan 30 00:14:54 crc kubenswrapper[4885]: E0130 00:14:54.307186 4885 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c47d747c5673776e931481c1618a4d1f2dc5eae57c50d9bebdaf59428ed93c6f is running failed: container process not found" containerID="c47d747c5673776e931481c1618a4d1f2dc5eae57c50d9bebdaf59428ed93c6f" cmd=["grpc_health_probe","-addr=:50051"] Jan 30 00:14:54 crc kubenswrapper[4885]: E0130 00:14:54.307293 4885 prober.go:104] "Probe 
errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c47d747c5673776e931481c1618a4d1f2dc5eae57c50d9bebdaf59428ed93c6f is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-v5294" podUID="c1ac97f9-b076-40c9-80fc-a2f6111d313b" containerName="registry-server" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.396589 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8hrw\" (UniqueName: \"kubernetes.io/projected/1fcfed07-3bc7-4be4-8f00-fb268c4b7821-kube-api-access-w8hrw\") pod \"marketplace-operator-79b997595-8628w\" (UID: \"1fcfed07-3bc7-4be4-8f00-fb268c4b7821\") " pod="openshift-marketplace/marketplace-operator-79b997595-8628w" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.396662 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1fcfed07-3bc7-4be4-8f00-fb268c4b7821-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8628w\" (UID: \"1fcfed07-3bc7-4be4-8f00-fb268c4b7821\") " pod="openshift-marketplace/marketplace-operator-79b997595-8628w" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.396705 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1fcfed07-3bc7-4be4-8f00-fb268c4b7821-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8628w\" (UID: \"1fcfed07-3bc7-4be4-8f00-fb268c4b7821\") " pod="openshift-marketplace/marketplace-operator-79b997595-8628w" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.498084 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1fcfed07-3bc7-4be4-8f00-fb268c4b7821-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8628w\" (UID: \"1fcfed07-3bc7-4be4-8f00-fb268c4b7821\") " pod="openshift-marketplace/marketplace-operator-79b997595-8628w" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.498438 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8hrw\" (UniqueName: \"kubernetes.io/projected/1fcfed07-3bc7-4be4-8f00-fb268c4b7821-kube-api-access-w8hrw\") pod \"marketplace-operator-79b997595-8628w\" (UID: \"1fcfed07-3bc7-4be4-8f00-fb268c4b7821\") " pod="openshift-marketplace/marketplace-operator-79b997595-8628w" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.498486 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1fcfed07-3bc7-4be4-8f00-fb268c4b7821-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8628w\" (UID: \"1fcfed07-3bc7-4be4-8f00-fb268c4b7821\") " pod="openshift-marketplace/marketplace-operator-79b997595-8628w" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.499664 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1fcfed07-3bc7-4be4-8f00-fb268c4b7821-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8628w\" (UID: \"1fcfed07-3bc7-4be4-8f00-fb268c4b7821\") " pod="openshift-marketplace/marketplace-operator-79b997595-8628w" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.513452 4885 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1fcfed07-3bc7-4be4-8f00-fb268c4b7821-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8628w\" (UID: \"1fcfed07-3bc7-4be4-8f00-fb268c4b7821\") " pod="openshift-marketplace/marketplace-operator-79b997595-8628w" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.516395 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8hrw\" (UniqueName: \"kubernetes.io/projected/1fcfed07-3bc7-4be4-8f00-fb268c4b7821-kube-api-access-w8hrw\") pod \"marketplace-operator-79b997595-8628w\" (UID: \"1fcfed07-3bc7-4be4-8f00-fb268c4b7821\") " pod="openshift-marketplace/marketplace-operator-79b997595-8628w" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.669366 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8628w" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.676954 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v5294" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.679115 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7fh97" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.682573 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mfdx7" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.784098 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gd928" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.802497 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mkn5v\" (UniqueName: \"kubernetes.io/projected/fdd3cba5-cf61-40cd-8c88-d289887fbf8a-kube-api-access-mkn5v\") pod \"fdd3cba5-cf61-40cd-8c88-d289887fbf8a\" (UID: \"fdd3cba5-cf61-40cd-8c88-d289887fbf8a\") " Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.802552 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1ac97f9-b076-40c9-80fc-a2f6111d313b-utilities\") pod \"c1ac97f9-b076-40c9-80fc-a2f6111d313b\" (UID: \"c1ac97f9-b076-40c9-80fc-a2f6111d313b\") " Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.802574 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdd3cba5-cf61-40cd-8c88-d289887fbf8a-utilities\") pod \"fdd3cba5-cf61-40cd-8c88-d289887fbf8a\" (UID: \"fdd3cba5-cf61-40cd-8c88-d289887fbf8a\") " Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.802649 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nz8hz\" (UniqueName: \"kubernetes.io/projected/c1ac97f9-b076-40c9-80fc-a2f6111d313b-kube-api-access-nz8hz\") pod \"c1ac97f9-b076-40c9-80fc-a2f6111d313b\" (UID: \"c1ac97f9-b076-40c9-80fc-a2f6111d313b\") " Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.802720 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1ac97f9-b076-40c9-80fc-a2f6111d313b-catalog-content\") pod \"c1ac97f9-b076-40c9-80fc-a2f6111d313b\" (UID: 
\"c1ac97f9-b076-40c9-80fc-a2f6111d313b\") " Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.802749 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14fd8cd4-0faa-45da-a532-9528073cfe8e-catalog-content\") pod \"14fd8cd4-0faa-45da-a532-9528073cfe8e\" (UID: \"14fd8cd4-0faa-45da-a532-9528073cfe8e\") " Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.802811 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14fd8cd4-0faa-45da-a532-9528073cfe8e-utilities\") pod \"14fd8cd4-0faa-45da-a532-9528073cfe8e\" (UID: \"14fd8cd4-0faa-45da-a532-9528073cfe8e\") " Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.802904 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lr8bd\" (UniqueName: \"kubernetes.io/projected/14fd8cd4-0faa-45da-a532-9528073cfe8e-kube-api-access-lr8bd\") pod \"14fd8cd4-0faa-45da-a532-9528073cfe8e\" (UID: \"14fd8cd4-0faa-45da-a532-9528073cfe8e\") " Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.802953 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdd3cba5-cf61-40cd-8c88-d289887fbf8a-catalog-content\") pod \"fdd3cba5-cf61-40cd-8c88-d289887fbf8a\" (UID: \"fdd3cba5-cf61-40cd-8c88-d289887fbf8a\") " Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.804906 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1ac97f9-b076-40c9-80fc-a2f6111d313b-utilities" (OuterVolumeSpecName: "utilities") pod "c1ac97f9-b076-40c9-80fc-a2f6111d313b" (UID: "c1ac97f9-b076-40c9-80fc-a2f6111d313b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.807740 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14fd8cd4-0faa-45da-a532-9528073cfe8e-utilities" (OuterVolumeSpecName: "utilities") pod "14fd8cd4-0faa-45da-a532-9528073cfe8e" (UID: "14fd8cd4-0faa-45da-a532-9528073cfe8e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.808236 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdd3cba5-cf61-40cd-8c88-d289887fbf8a-kube-api-access-mkn5v" (OuterVolumeSpecName: "kube-api-access-mkn5v") pod "fdd3cba5-cf61-40cd-8c88-d289887fbf8a" (UID: "fdd3cba5-cf61-40cd-8c88-d289887fbf8a"). InnerVolumeSpecName "kube-api-access-mkn5v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.809549 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fdd3cba5-cf61-40cd-8c88-d289887fbf8a-utilities" (OuterVolumeSpecName: "utilities") pod "fdd3cba5-cf61-40cd-8c88-d289887fbf8a" (UID: "fdd3cba5-cf61-40cd-8c88-d289887fbf8a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.821655 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.822120 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14fd8cd4-0faa-45da-a532-9528073cfe8e-kube-api-access-lr8bd" (OuterVolumeSpecName: "kube-api-access-lr8bd") pod "14fd8cd4-0faa-45da-a532-9528073cfe8e" (UID: "14fd8cd4-0faa-45da-a532-9528073cfe8e"). InnerVolumeSpecName "kube-api-access-lr8bd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.848932 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1ac97f9-b076-40c9-80fc-a2f6111d313b-kube-api-access-nz8hz" (OuterVolumeSpecName: "kube-api-access-nz8hz") pod "c1ac97f9-b076-40c9-80fc-a2f6111d313b" (UID: "c1ac97f9-b076-40c9-80fc-a2f6111d313b"). InnerVolumeSpecName "kube-api-access-nz8hz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.861156 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14fd8cd4-0faa-45da-a532-9528073cfe8e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "14fd8cd4-0faa-45da-a532-9528073cfe8e" (UID: "14fd8cd4-0faa-45da-a532-9528073cfe8e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.871895 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1ac97f9-b076-40c9-80fc-a2f6111d313b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c1ac97f9-b076-40c9-80fc-a2f6111d313b" (UID: "c1ac97f9-b076-40c9-80fc-a2f6111d313b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.883227 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fdd3cba5-cf61-40cd-8c88-d289887fbf8a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fdd3cba5-cf61-40cd-8c88-d289887fbf8a" (UID: "fdd3cba5-cf61-40cd-8c88-d289887fbf8a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.905405 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/faedfaad-6883-471e-9a4e-d15cc6b969d7-utilities\") pod \"faedfaad-6883-471e-9a4e-d15cc6b969d7\" (UID: \"faedfaad-6883-471e-9a4e-d15cc6b969d7\") " Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.905514 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6xsj\" (UniqueName: \"kubernetes.io/projected/faedfaad-6883-471e-9a4e-d15cc6b969d7-kube-api-access-n6xsj\") pod \"faedfaad-6883-471e-9a4e-d15cc6b969d7\" (UID: \"faedfaad-6883-471e-9a4e-d15cc6b969d7\") " Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.905624 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/faedfaad-6883-471e-9a4e-d15cc6b969d7-catalog-content\") pod \"faedfaad-6883-471e-9a4e-d15cc6b969d7\" (UID: \"faedfaad-6883-471e-9a4e-d15cc6b969d7\") " Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.905907 4885 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1ac97f9-b076-40c9-80fc-a2f6111d313b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.905932 4885 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14fd8cd4-0faa-45da-a532-9528073cfe8e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.905946 4885 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14fd8cd4-0faa-45da-a532-9528073cfe8e-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.905957 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lr8bd\" (UniqueName: \"kubernetes.io/projected/14fd8cd4-0faa-45da-a532-9528073cfe8e-kube-api-access-lr8bd\") on node \"crc\" DevicePath \"\"" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.905969 4885 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdd3cba5-cf61-40cd-8c88-d289887fbf8a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.905981 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mkn5v\" (UniqueName: \"kubernetes.io/projected/fdd3cba5-cf61-40cd-8c88-d289887fbf8a-kube-api-access-mkn5v\") on node \"crc\" DevicePath \"\"" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.905990 4885 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdd3cba5-cf61-40cd-8c88-d289887fbf8a-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.905999 4885 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1ac97f9-b076-40c9-80fc-a2f6111d313b-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.906009 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nz8hz\" (UniqueName: \"kubernetes.io/projected/c1ac97f9-b076-40c9-80fc-a2f6111d313b-kube-api-access-nz8hz\") on node \"crc\" DevicePath \"\"" 
Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.906634 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/faedfaad-6883-471e-9a4e-d15cc6b969d7-utilities" (OuterVolumeSpecName: "utilities") pod "faedfaad-6883-471e-9a4e-d15cc6b969d7" (UID: "faedfaad-6883-471e-9a4e-d15cc6b969d7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.909395 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/faedfaad-6883-471e-9a4e-d15cc6b969d7-kube-api-access-n6xsj" (OuterVolumeSpecName: "kube-api-access-n6xsj") pod "faedfaad-6883-471e-9a4e-d15cc6b969d7" (UID: "faedfaad-6883-471e-9a4e-d15cc6b969d7"). InnerVolumeSpecName "kube-api-access-n6xsj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:14:54 crc kubenswrapper[4885]: I0130 00:14:54.923534 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8628w"] Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.006720 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a345d711-84e3-47c8-a255-f833dfaca7fa-marketplace-trusted-ca\") pod \"a345d711-84e3-47c8-a255-f833dfaca7fa\" (UID: \"a345d711-84e3-47c8-a255-f833dfaca7fa\") " Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.007294 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxjkf\" (UniqueName: \"kubernetes.io/projected/a345d711-84e3-47c8-a255-f833dfaca7fa-kube-api-access-wxjkf\") pod \"a345d711-84e3-47c8-a255-f833dfaca7fa\" (UID: \"a345d711-84e3-47c8-a255-f833dfaca7fa\") " Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.007326 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a345d711-84e3-47c8-a255-f833dfaca7fa-marketplace-operator-metrics\") pod \"a345d711-84e3-47c8-a255-f833dfaca7fa\" (UID: \"a345d711-84e3-47c8-a255-f833dfaca7fa\") " Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.007503 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a345d711-84e3-47c8-a255-f833dfaca7fa-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "a345d711-84e3-47c8-a255-f833dfaca7fa" (UID: "a345d711-84e3-47c8-a255-f833dfaca7fa"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.008237 4885 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/faedfaad-6883-471e-9a4e-d15cc6b969d7-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.008264 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6xsj\" (UniqueName: \"kubernetes.io/projected/faedfaad-6883-471e-9a4e-d15cc6b969d7-kube-api-access-n6xsj\") on node \"crc\" DevicePath \"\"" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.008275 4885 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a345d711-84e3-47c8-a255-f833dfaca7fa-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.012116 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a345d711-84e3-47c8-a255-f833dfaca7fa-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "a345d711-84e3-47c8-a255-f833dfaca7fa" (UID: "a345d711-84e3-47c8-a255-f833dfaca7fa"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.012802 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a345d711-84e3-47c8-a255-f833dfaca7fa-kube-api-access-wxjkf" (OuterVolumeSpecName: "kube-api-access-wxjkf") pod "a345d711-84e3-47c8-a255-f833dfaca7fa" (UID: "a345d711-84e3-47c8-a255-f833dfaca7fa"). InnerVolumeSpecName "kube-api-access-wxjkf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.049576 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/faedfaad-6883-471e-9a4e-d15cc6b969d7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "faedfaad-6883-471e-9a4e-d15cc6b969d7" (UID: "faedfaad-6883-471e-9a4e-d15cc6b969d7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.109453 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxjkf\" (UniqueName: \"kubernetes.io/projected/a345d711-84e3-47c8-a255-f833dfaca7fa-kube-api-access-wxjkf\") on node \"crc\" DevicePath \"\"" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.109497 4885 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a345d711-84e3-47c8-a255-f833dfaca7fa-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.109511 4885 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/faedfaad-6883-471e-9a4e-d15cc6b969d7-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.277090 4885 generic.go:334] "Generic (PLEG): container finished" podID="a345d711-84e3-47c8-a255-f833dfaca7fa" containerID="fd6f7c890fc709d1ae8cd5e56ce35a935df28b010154600842885a496ae3ae15" exitCode=0 Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.277144 4885 util.go:48] "No ready sandbox for pod can be found. 
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.277208 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" event={"ID":"a345d711-84e3-47c8-a255-f833dfaca7fa","Type":"ContainerDied","Data":"fd6f7c890fc709d1ae8cd5e56ce35a935df28b010154600842885a496ae3ae15"}
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.277258 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mfxlj" event={"ID":"a345d711-84e3-47c8-a255-f833dfaca7fa","Type":"ContainerDied","Data":"13d16105c3ffd7b319e1258860a37fd66f32b7ac34b8684aa782a79b1dd489c8"}
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.277285 4885 scope.go:117] "RemoveContainer" containerID="fd6f7c890fc709d1ae8cd5e56ce35a935df28b010154600842885a496ae3ae15"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.279274 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8628w" event={"ID":"1fcfed07-3bc7-4be4-8f00-fb268c4b7821","Type":"ContainerStarted","Data":"4e1be8ff230f9d7faa4c5baf2ecb635bf35e699ad41435de6dced66294467d4f"}
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.279316 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8628w" event={"ID":"1fcfed07-3bc7-4be4-8f00-fb268c4b7821","Type":"ContainerStarted","Data":"0cbd4e1d1fcad3187cf9bb4abfdd251d0a177bac01822d904647a9eb44e123cf"}
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.280107 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-8628w"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.281269 4885 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-8628w container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.68:8080/healthz\": dial tcp 10.217.0.68:8080: connect: connection refused" start-of-body=
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.281307 4885 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-8628w" podUID="1fcfed07-3bc7-4be4-8f00-fb268c4b7821" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.68:8080/healthz\": dial tcp 10.217.0.68:8080: connect: connection refused"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.282003 4885 generic.go:334] "Generic (PLEG): container finished" podID="c1ac97f9-b076-40c9-80fc-a2f6111d313b" containerID="c47d747c5673776e931481c1618a4d1f2dc5eae57c50d9bebdaf59428ed93c6f" exitCode=0
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.282058 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v5294" event={"ID":"c1ac97f9-b076-40c9-80fc-a2f6111d313b","Type":"ContainerDied","Data":"c47d747c5673776e931481c1618a4d1f2dc5eae57c50d9bebdaf59428ed93c6f"}
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.282065 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v5294"
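
The prober lines above show the replacement marketplace-operator pod failing its first readiness probe because nothing is listening yet on 10.217.0.68:8080; a second later the probe flips to ready. A sketch of the same kind of HTTP check, assuming the kubelet's defaults (1s timeout per attempt, and a 2xx or 3xx status counting as success); the URL is taken from the log, the helper itself is illustrative:

```go
// probe.go -- minimal HTTP readiness check in the spirit of prober.go above:
// a dial error ("connection refused") is a probe failure, not a machinery error.
package main

import (
	"fmt"
	"net/http"
	"time"
)

func probe(url string) (ready bool, detail string) {
	client := &http.Client{Timeout: 1 * time.Second} // kubelet default timeoutSeconds=1
	resp, err := client.Get(url)
	if err != nil {
		// e.g. `Get "http://10.217.0.68:8080/healthz": dial tcp 10.217.0.68:8080: connect: connection refused`
		return false, err.Error()
	}
	defer resp.Body.Close()
	return resp.StatusCode >= 200 && resp.StatusCode < 400, resp.Status
}

func main() {
	ready, detail := probe("http://10.217.0.68:8080/healthz")
	fmt.Printf("ready=%v (%s)\n", ready, detail)
}
```
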
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.282076 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v5294" event={"ID":"c1ac97f9-b076-40c9-80fc-a2f6111d313b","Type":"ContainerDied","Data":"3b328d643a5da0b6bac284c8fdfb987e1e89821b68d126b0a6734cc2d2825a11"}
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.283577 4885 generic.go:334] "Generic (PLEG): container finished" podID="14fd8cd4-0faa-45da-a532-9528073cfe8e" containerID="f2bfaf33f740563349e88f1a29d1fc1712be88fbbf13ad880c023a1f43fa7560" exitCode=0
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.283624 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mfdx7" event={"ID":"14fd8cd4-0faa-45da-a532-9528073cfe8e","Type":"ContainerDied","Data":"f2bfaf33f740563349e88f1a29d1fc1712be88fbbf13ad880c023a1f43fa7560"}
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.283644 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mfdx7" event={"ID":"14fd8cd4-0faa-45da-a532-9528073cfe8e","Type":"ContainerDied","Data":"5b9f8b2a017bd629c04e3b234667aacbe1250444a1d3a8c02d491c2a31d1b0f2"}
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.283699 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mfdx7"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.287689 4885 generic.go:334] "Generic (PLEG): container finished" podID="faedfaad-6883-471e-9a4e-d15cc6b969d7" containerID="cbbbab32edb42b7f316c1a84e4a8aa19ff6c768177f1bbdc06371a2d9b9798c8" exitCode=0
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.287807 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gd928" event={"ID":"faedfaad-6883-471e-9a4e-d15cc6b969d7","Type":"ContainerDied","Data":"cbbbab32edb42b7f316c1a84e4a8aa19ff6c768177f1bbdc06371a2d9b9798c8"}
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.287844 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gd928" event={"ID":"faedfaad-6883-471e-9a4e-d15cc6b969d7","Type":"ContainerDied","Data":"7093931cbcca622e8bc123e559cef3063867a4488c83e548536f8c2bcbdf57e3"}
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.287857 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gd928"
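
Each "SyncLoop (PLEG): event for pod" line above carries an event={...} payload that happens to be valid JSON, with the pod UID in ID and a container or sandbox ID in Data. A hedged sketch that rebuilds a per-pod timeline from those payloads; the struct mirrors only the printed keys and is not the kubelet's internal event type:

```go
// pleg_timeline.go -- groups PLEG events by pod UID from a kubelet log on stdin.
package main

import (
	"bufio"
	"encoding/json"
	"fmt"
	"os"
	"regexp"
)

type plegEvent struct {
	ID   string // pod UID
	Type string // ContainerStarted / ContainerDied
	Data string // container or sandbox ID
}

var eventRe = regexp.MustCompile(`event=(\{[^}]*\})`)

func main() {
	timeline := map[string][]string{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 1<<20), 1<<20)
	for sc.Scan() {
		m := eventRe.FindStringSubmatch(sc.Text())
		if m == nil {
			continue
		}
		var ev plegEvent
		if json.Unmarshal([]byte(m[1]), &ev) != nil {
			continue
		}
		id := ev.Data
		if len(id) > 12 {
			id = id[:12] // short IDs read better
		}
		timeline[ev.ID] = append(timeline[ev.ID], ev.Type+":"+id)
	}
	for pod, evs := range timeline {
		fmt.Println(pod, evs)
	}
}
```
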
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.292050 4885 generic.go:334] "Generic (PLEG): container finished" podID="fdd3cba5-cf61-40cd-8c88-d289887fbf8a" containerID="3cdbdd6f61793c9de0250b35733d8beb128471927b6d2996288d415e316391af" exitCode=0
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.292107 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7fh97" event={"ID":"fdd3cba5-cf61-40cd-8c88-d289887fbf8a","Type":"ContainerDied","Data":"3cdbdd6f61793c9de0250b35733d8beb128471927b6d2996288d415e316391af"}
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.292144 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7fh97" event={"ID":"fdd3cba5-cf61-40cd-8c88-d289887fbf8a","Type":"ContainerDied","Data":"7825c062eb2df92c593ab36796bf8355fe0204ec930bd0e30504bd2f40964021"}
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.292225 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7fh97"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.311739 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-8628w" podStartSLOduration=1.311714474 podStartE2EDuration="1.311714474s" podCreationTimestamp="2026-01-30 00:14:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:14:55.30420384 +0000 UTC m=+381.895675588" watchObservedRunningTime="2026-01-30 00:14:55.311714474 +0000 UTC m=+381.903186222"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.312197 4885 scope.go:117] "RemoveContainer" containerID="6032deeddc43ceab6510c33b1dbef7ad2723f0954390a032a43a52c0192f1a8c"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.407703 4885 scope.go:117] "RemoveContainer" containerID="fd6f7c890fc709d1ae8cd5e56ce35a935df28b010154600842885a496ae3ae15"
Jan 30 00:14:55 crc kubenswrapper[4885]: E0130 00:14:55.408550 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd6f7c890fc709d1ae8cd5e56ce35a935df28b010154600842885a496ae3ae15\": container with ID starting with fd6f7c890fc709d1ae8cd5e56ce35a935df28b010154600842885a496ae3ae15 not found: ID does not exist" containerID="fd6f7c890fc709d1ae8cd5e56ce35a935df28b010154600842885a496ae3ae15"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.408603 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd6f7c890fc709d1ae8cd5e56ce35a935df28b010154600842885a496ae3ae15"} err="failed to get container status \"fd6f7c890fc709d1ae8cd5e56ce35a935df28b010154600842885a496ae3ae15\": rpc error: code = NotFound desc = could not find container \"fd6f7c890fc709d1ae8cd5e56ce35a935df28b010154600842885a496ae3ae15\": container with ID starting with fd6f7c890fc709d1ae8cd5e56ce35a935df28b010154600842885a496ae3ae15 not found: ID does not exist"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.408648 4885 scope.go:117] "RemoveContainer" containerID="6032deeddc43ceab6510c33b1dbef7ad2723f0954390a032a43a52c0192f1a8c"
Jan 30 00:14:55 crc kubenswrapper[4885]: E0130 00:14:55.409133 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6032deeddc43ceab6510c33b1dbef7ad2723f0954390a032a43a52c0192f1a8c\": container with ID starting with 6032deeddc43ceab6510c33b1dbef7ad2723f0954390a032a43a52c0192f1a8c not found: ID does not exist" containerID="6032deeddc43ceab6510c33b1dbef7ad2723f0954390a032a43a52c0192f1a8c"
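
The pod_startup_latency_tracker line above reports podStartE2EDuration as watchObservedRunningTime minus podCreationTimestamp; with firstStartedPulling at the zero time (no image pull happened), the SLO duration equals the E2E duration, which is what the log shows. A small sketch that recomputes it from the printed timestamps; the layout string matches Go's default time formatting as it appears in the log:

```go
// startup_duration.go -- recompute podStartE2EDuration from the logged timestamps.
package main

import (
	"fmt"
	"time"
)

// Layout matching strings like "2026-01-30 00:14:55.311714474 +0000 UTC"
// (the monotonic "m=+..." suffix is not part of the parsed value).
const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func main() {
	created, _ := time.Parse(layout, "2026-01-30 00:14:54 +0000 UTC")
	running, _ := time.Parse(layout, "2026-01-30 00:14:55.311714474 +0000 UTC")
	fmt.Println(running.Sub(created)) // 1.311714474s, matching podStartE2EDuration
}
```
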
\"6032deeddc43ceab6510c33b1dbef7ad2723f0954390a032a43a52c0192f1a8c\": container with ID starting with 6032deeddc43ceab6510c33b1dbef7ad2723f0954390a032a43a52c0192f1a8c not found: ID does not exist" containerID="6032deeddc43ceab6510c33b1dbef7ad2723f0954390a032a43a52c0192f1a8c" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.409176 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6032deeddc43ceab6510c33b1dbef7ad2723f0954390a032a43a52c0192f1a8c"} err="failed to get container status \"6032deeddc43ceab6510c33b1dbef7ad2723f0954390a032a43a52c0192f1a8c\": rpc error: code = NotFound desc = could not find container \"6032deeddc43ceab6510c33b1dbef7ad2723f0954390a032a43a52c0192f1a8c\": container with ID starting with 6032deeddc43ceab6510c33b1dbef7ad2723f0954390a032a43a52c0192f1a8c not found: ID does not exist" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.409204 4885 scope.go:117] "RemoveContainer" containerID="c47d747c5673776e931481c1618a4d1f2dc5eae57c50d9bebdaf59428ed93c6f" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.409420 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mfdx7"] Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.414024 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mfdx7"] Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.433449 4885 scope.go:117] "RemoveContainer" containerID="1f1d8ad0778c62040ec7c66ddf3d32878278c2d209cca2154148a049a2df4e56" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.442860 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-v5294"] Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.457317 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-v5294"] Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.465515 4885 scope.go:117] "RemoveContainer" containerID="fde44b5918e18a3d9b07f724feef05ebfcbcd3c512c474eef165441c0e86021c" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.466954 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mfxlj"] Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.477813 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mfxlj"] Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.481035 4885 scope.go:117] "RemoveContainer" containerID="c47d747c5673776e931481c1618a4d1f2dc5eae57c50d9bebdaf59428ed93c6f" Jan 30 00:14:55 crc kubenswrapper[4885]: E0130 00:14:55.481437 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c47d747c5673776e931481c1618a4d1f2dc5eae57c50d9bebdaf59428ed93c6f\": container with ID starting with c47d747c5673776e931481c1618a4d1f2dc5eae57c50d9bebdaf59428ed93c6f not found: ID does not exist" containerID="c47d747c5673776e931481c1618a4d1f2dc5eae57c50d9bebdaf59428ed93c6f" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.481486 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c47d747c5673776e931481c1618a4d1f2dc5eae57c50d9bebdaf59428ed93c6f"} err="failed to get container status \"c47d747c5673776e931481c1618a4d1f2dc5eae57c50d9bebdaf59428ed93c6f\": rpc error: code = NotFound desc = could not find container 
\"c47d747c5673776e931481c1618a4d1f2dc5eae57c50d9bebdaf59428ed93c6f\": container with ID starting with c47d747c5673776e931481c1618a4d1f2dc5eae57c50d9bebdaf59428ed93c6f not found: ID does not exist" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.481522 4885 scope.go:117] "RemoveContainer" containerID="1f1d8ad0778c62040ec7c66ddf3d32878278c2d209cca2154148a049a2df4e56" Jan 30 00:14:55 crc kubenswrapper[4885]: E0130 00:14:55.481945 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f1d8ad0778c62040ec7c66ddf3d32878278c2d209cca2154148a049a2df4e56\": container with ID starting with 1f1d8ad0778c62040ec7c66ddf3d32878278c2d209cca2154148a049a2df4e56 not found: ID does not exist" containerID="1f1d8ad0778c62040ec7c66ddf3d32878278c2d209cca2154148a049a2df4e56" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.481986 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f1d8ad0778c62040ec7c66ddf3d32878278c2d209cca2154148a049a2df4e56"} err="failed to get container status \"1f1d8ad0778c62040ec7c66ddf3d32878278c2d209cca2154148a049a2df4e56\": rpc error: code = NotFound desc = could not find container \"1f1d8ad0778c62040ec7c66ddf3d32878278c2d209cca2154148a049a2df4e56\": container with ID starting with 1f1d8ad0778c62040ec7c66ddf3d32878278c2d209cca2154148a049a2df4e56 not found: ID does not exist" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.482013 4885 scope.go:117] "RemoveContainer" containerID="fde44b5918e18a3d9b07f724feef05ebfcbcd3c512c474eef165441c0e86021c" Jan 30 00:14:55 crc kubenswrapper[4885]: E0130 00:14:55.482387 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fde44b5918e18a3d9b07f724feef05ebfcbcd3c512c474eef165441c0e86021c\": container with ID starting with fde44b5918e18a3d9b07f724feef05ebfcbcd3c512c474eef165441c0e86021c not found: ID does not exist" containerID="fde44b5918e18a3d9b07f724feef05ebfcbcd3c512c474eef165441c0e86021c" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.482418 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fde44b5918e18a3d9b07f724feef05ebfcbcd3c512c474eef165441c0e86021c"} err="failed to get container status \"fde44b5918e18a3d9b07f724feef05ebfcbcd3c512c474eef165441c0e86021c\": rpc error: code = NotFound desc = could not find container \"fde44b5918e18a3d9b07f724feef05ebfcbcd3c512c474eef165441c0e86021c\": container with ID starting with fde44b5918e18a3d9b07f724feef05ebfcbcd3c512c474eef165441c0e86021c not found: ID does not exist" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.482436 4885 scope.go:117] "RemoveContainer" containerID="f2bfaf33f740563349e88f1a29d1fc1712be88fbbf13ad880c023a1f43fa7560" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.484463 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7fh97"] Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.491970 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7fh97"] Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.496976 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gd928"] Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.500331 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gd928"] Jan 30 00:14:55 
crc kubenswrapper[4885]: I0130 00:14:55.503821 4885 scope.go:117] "RemoveContainer" containerID="9b12c4ebdc4666dd464659f1cbf6752d184579024a105cf65e4dd49053949b7a" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.522877 4885 scope.go:117] "RemoveContainer" containerID="aff87a71de6cae6546a95cc578421de1d8a48dd86f8200b7abab0d6092065487" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.540582 4885 scope.go:117] "RemoveContainer" containerID="f2bfaf33f740563349e88f1a29d1fc1712be88fbbf13ad880c023a1f43fa7560" Jan 30 00:14:55 crc kubenswrapper[4885]: E0130 00:14:55.541119 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2bfaf33f740563349e88f1a29d1fc1712be88fbbf13ad880c023a1f43fa7560\": container with ID starting with f2bfaf33f740563349e88f1a29d1fc1712be88fbbf13ad880c023a1f43fa7560 not found: ID does not exist" containerID="f2bfaf33f740563349e88f1a29d1fc1712be88fbbf13ad880c023a1f43fa7560" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.541171 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2bfaf33f740563349e88f1a29d1fc1712be88fbbf13ad880c023a1f43fa7560"} err="failed to get container status \"f2bfaf33f740563349e88f1a29d1fc1712be88fbbf13ad880c023a1f43fa7560\": rpc error: code = NotFound desc = could not find container \"f2bfaf33f740563349e88f1a29d1fc1712be88fbbf13ad880c023a1f43fa7560\": container with ID starting with f2bfaf33f740563349e88f1a29d1fc1712be88fbbf13ad880c023a1f43fa7560 not found: ID does not exist" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.541215 4885 scope.go:117] "RemoveContainer" containerID="9b12c4ebdc4666dd464659f1cbf6752d184579024a105cf65e4dd49053949b7a" Jan 30 00:14:55 crc kubenswrapper[4885]: E0130 00:14:55.541916 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b12c4ebdc4666dd464659f1cbf6752d184579024a105cf65e4dd49053949b7a\": container with ID starting with 9b12c4ebdc4666dd464659f1cbf6752d184579024a105cf65e4dd49053949b7a not found: ID does not exist" containerID="9b12c4ebdc4666dd464659f1cbf6752d184579024a105cf65e4dd49053949b7a" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.541968 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b12c4ebdc4666dd464659f1cbf6752d184579024a105cf65e4dd49053949b7a"} err="failed to get container status \"9b12c4ebdc4666dd464659f1cbf6752d184579024a105cf65e4dd49053949b7a\": rpc error: code = NotFound desc = could not find container \"9b12c4ebdc4666dd464659f1cbf6752d184579024a105cf65e4dd49053949b7a\": container with ID starting with 9b12c4ebdc4666dd464659f1cbf6752d184579024a105cf65e4dd49053949b7a not found: ID does not exist" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.542010 4885 scope.go:117] "RemoveContainer" containerID="aff87a71de6cae6546a95cc578421de1d8a48dd86f8200b7abab0d6092065487" Jan 30 00:14:55 crc kubenswrapper[4885]: E0130 00:14:55.543808 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aff87a71de6cae6546a95cc578421de1d8a48dd86f8200b7abab0d6092065487\": container with ID starting with aff87a71de6cae6546a95cc578421de1d8a48dd86f8200b7abab0d6092065487 not found: ID does not exist" containerID="aff87a71de6cae6546a95cc578421de1d8a48dd86f8200b7abab0d6092065487" Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.543878 4885 
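The RemoveContainer / "ContainerStatus from runtime service failed" / "DeleteContainer returned error" triplets above are a benign race: by the time the kubelet retries the delete, CRI-O has already dropped the container, so the status lookup returns gRPC NotFound. A sketch of the idempotent-cleanup pattern this implies, where NotFound is treated as "already removed"; the Runtime interface here is an assumption for illustration, not the kubelet's actual CRI client:

```go
// remove_idempotent.go -- treat gRPC NotFound from a container runtime as success.
package main

import (
	"context"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// Runtime is an illustrative stand-in for a CRI-style runtime client.
type Runtime interface {
	RemoveContainer(ctx context.Context, id string) error
}

// removeIfPresent swallows NotFound so repeated cleanups stay idempotent.
func removeIfPresent(ctx context.Context, rt Runtime, id string) error {
	err := rt.RemoveContainer(ctx, id)
	if status.Code(err) == codes.NotFound {
		return nil // the log above shows exactly this benign race
	}
	return err
}

// goneRuntime simulates a runtime that has already removed the container.
type goneRuntime struct{}

func (goneRuntime) RemoveContainer(ctx context.Context, id string) error {
	return status.Error(codes.NotFound, "could not find container "+id)
}

func main() {
	if err := removeIfPresent(context.Background(), goneRuntime{}, "fd6f7c890fc7"); err != nil {
		fmt.Println("unexpected:", err)
		return
	}
	fmt.Println("already gone: treated as success")
}
```
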
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.543924 4885 scope.go:117] "RemoveContainer" containerID="cbbbab32edb42b7f316c1a84e4a8aa19ff6c768177f1bbdc06371a2d9b9798c8"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.565212 4885 scope.go:117] "RemoveContainer" containerID="598a1159349b664fa2fdf5b3082cd95cb899b3746c318cc793a5cd74f58c0bf2"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.580721 4885 scope.go:117] "RemoveContainer" containerID="582cf37bc5c0da7c67ab281f7606bbafc7b03d380b06ccfe5b4172b98f6e47a4"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.608711 4885 scope.go:117] "RemoveContainer" containerID="cbbbab32edb42b7f316c1a84e4a8aa19ff6c768177f1bbdc06371a2d9b9798c8"
Jan 30 00:14:55 crc kubenswrapper[4885]: E0130 00:14:55.609422 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbbbab32edb42b7f316c1a84e4a8aa19ff6c768177f1bbdc06371a2d9b9798c8\": container with ID starting with cbbbab32edb42b7f316c1a84e4a8aa19ff6c768177f1bbdc06371a2d9b9798c8 not found: ID does not exist" containerID="cbbbab32edb42b7f316c1a84e4a8aa19ff6c768177f1bbdc06371a2d9b9798c8"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.609464 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbbbab32edb42b7f316c1a84e4a8aa19ff6c768177f1bbdc06371a2d9b9798c8"} err="failed to get container status \"cbbbab32edb42b7f316c1a84e4a8aa19ff6c768177f1bbdc06371a2d9b9798c8\": rpc error: code = NotFound desc = could not find container \"cbbbab32edb42b7f316c1a84e4a8aa19ff6c768177f1bbdc06371a2d9b9798c8\": container with ID starting with cbbbab32edb42b7f316c1a84e4a8aa19ff6c768177f1bbdc06371a2d9b9798c8 not found: ID does not exist"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.609498 4885 scope.go:117] "RemoveContainer" containerID="598a1159349b664fa2fdf5b3082cd95cb899b3746c318cc793a5cd74f58c0bf2"
Jan 30 00:14:55 crc kubenswrapper[4885]: E0130 00:14:55.610066 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"598a1159349b664fa2fdf5b3082cd95cb899b3746c318cc793a5cd74f58c0bf2\": container with ID starting with 598a1159349b664fa2fdf5b3082cd95cb899b3746c318cc793a5cd74f58c0bf2 not found: ID does not exist" containerID="598a1159349b664fa2fdf5b3082cd95cb899b3746c318cc793a5cd74f58c0bf2"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.610220 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"598a1159349b664fa2fdf5b3082cd95cb899b3746c318cc793a5cd74f58c0bf2"} err="failed to get container status \"598a1159349b664fa2fdf5b3082cd95cb899b3746c318cc793a5cd74f58c0bf2\": rpc error: code = NotFound desc = could not find container \"598a1159349b664fa2fdf5b3082cd95cb899b3746c318cc793a5cd74f58c0bf2\": container with ID starting with 598a1159349b664fa2fdf5b3082cd95cb899b3746c318cc793a5cd74f58c0bf2 not found: ID does not exist"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.610340 4885 scope.go:117] "RemoveContainer" containerID="582cf37bc5c0da7c67ab281f7606bbafc7b03d380b06ccfe5b4172b98f6e47a4"
Jan 30 00:14:55 crc kubenswrapper[4885]: E0130 00:14:55.610844 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"582cf37bc5c0da7c67ab281f7606bbafc7b03d380b06ccfe5b4172b98f6e47a4\": container with ID starting with 582cf37bc5c0da7c67ab281f7606bbafc7b03d380b06ccfe5b4172b98f6e47a4 not found: ID does not exist" containerID="582cf37bc5c0da7c67ab281f7606bbafc7b03d380b06ccfe5b4172b98f6e47a4"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.610885 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"582cf37bc5c0da7c67ab281f7606bbafc7b03d380b06ccfe5b4172b98f6e47a4"} err="failed to get container status \"582cf37bc5c0da7c67ab281f7606bbafc7b03d380b06ccfe5b4172b98f6e47a4\": rpc error: code = NotFound desc = could not find container \"582cf37bc5c0da7c67ab281f7606bbafc7b03d380b06ccfe5b4172b98f6e47a4\": container with ID starting with 582cf37bc5c0da7c67ab281f7606bbafc7b03d380b06ccfe5b4172b98f6e47a4 not found: ID does not exist"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.610921 4885 scope.go:117] "RemoveContainer" containerID="3cdbdd6f61793c9de0250b35733d8beb128471927b6d2996288d415e316391af"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.629864 4885 scope.go:117] "RemoveContainer" containerID="b7592d4755c18b55b3253e368fc6a98aed8ffa114bdaa4bcf75e3c723949a62d"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.651297 4885 scope.go:117] "RemoveContainer" containerID="eb7c69075c2d455b3ec53366cc3f598a292ec3124794537038084f1a097244fa"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.667262 4885 scope.go:117] "RemoveContainer" containerID="3cdbdd6f61793c9de0250b35733d8beb128471927b6d2996288d415e316391af"
Jan 30 00:14:55 crc kubenswrapper[4885]: E0130 00:14:55.667741 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3cdbdd6f61793c9de0250b35733d8beb128471927b6d2996288d415e316391af\": container with ID starting with 3cdbdd6f61793c9de0250b35733d8beb128471927b6d2996288d415e316391af not found: ID does not exist" containerID="3cdbdd6f61793c9de0250b35733d8beb128471927b6d2996288d415e316391af"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.667829 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cdbdd6f61793c9de0250b35733d8beb128471927b6d2996288d415e316391af"} err="failed to get container status \"3cdbdd6f61793c9de0250b35733d8beb128471927b6d2996288d415e316391af\": rpc error: code = NotFound desc = could not find container \"3cdbdd6f61793c9de0250b35733d8beb128471927b6d2996288d415e316391af\": container with ID starting with 3cdbdd6f61793c9de0250b35733d8beb128471927b6d2996288d415e316391af not found: ID does not exist"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.667890 4885 scope.go:117] "RemoveContainer" containerID="b7592d4755c18b55b3253e368fc6a98aed8ffa114bdaa4bcf75e3c723949a62d"
Jan 30 00:14:55 crc kubenswrapper[4885]: E0130 00:14:55.668290 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7592d4755c18b55b3253e368fc6a98aed8ffa114bdaa4bcf75e3c723949a62d\": container with ID starting with b7592d4755c18b55b3253e368fc6a98aed8ffa114bdaa4bcf75e3c723949a62d not found: ID does not exist" containerID="b7592d4755c18b55b3253e368fc6a98aed8ffa114bdaa4bcf75e3c723949a62d"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.668327 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7592d4755c18b55b3253e368fc6a98aed8ffa114bdaa4bcf75e3c723949a62d"} err="failed to get container status \"b7592d4755c18b55b3253e368fc6a98aed8ffa114bdaa4bcf75e3c723949a62d\": rpc error: code = NotFound desc = could not find container \"b7592d4755c18b55b3253e368fc6a98aed8ffa114bdaa4bcf75e3c723949a62d\": container with ID starting with b7592d4755c18b55b3253e368fc6a98aed8ffa114bdaa4bcf75e3c723949a62d not found: ID does not exist"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.668357 4885 scope.go:117] "RemoveContainer" containerID="eb7c69075c2d455b3ec53366cc3f598a292ec3124794537038084f1a097244fa"
Jan 30 00:14:55 crc kubenswrapper[4885]: E0130 00:14:55.668722 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb7c69075c2d455b3ec53366cc3f598a292ec3124794537038084f1a097244fa\": container with ID starting with eb7c69075c2d455b3ec53366cc3f598a292ec3124794537038084f1a097244fa not found: ID does not exist" containerID="eb7c69075c2d455b3ec53366cc3f598a292ec3124794537038084f1a097244fa"
Jan 30 00:14:55 crc kubenswrapper[4885]: I0130 00:14:55.668795 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb7c69075c2d455b3ec53366cc3f598a292ec3124794537038084f1a097244fa"} err="failed to get container status \"eb7c69075c2d455b3ec53366cc3f598a292ec3124794537038084f1a097244fa\": rpc error: code = NotFound desc = could not find container \"eb7c69075c2d455b3ec53366cc3f598a292ec3124794537038084f1a097244fa\": container with ID starting with eb7c69075c2d455b3ec53366cc3f598a292ec3124794537038084f1a097244fa not found: ID does not exist"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.152406 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" path="/var/lib/kubelet/pods/14fd8cd4-0faa-45da-a532-9528073cfe8e/volumes"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.154509 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a345d711-84e3-47c8-a255-f833dfaca7fa" path="/var/lib/kubelet/pods/a345d711-84e3-47c8-a255-f833dfaca7fa/volumes"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.156192 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1ac97f9-b076-40c9-80fc-a2f6111d313b" path="/var/lib/kubelet/pods/c1ac97f9-b076-40c9-80fc-a2f6111d313b/volumes"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.159328 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="faedfaad-6883-471e-9a4e-d15cc6b969d7" path="/var/lib/kubelet/pods/faedfaad-6883-471e-9a4e-d15cc6b969d7/volumes"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.160699 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdd3cba5-cf61-40cd-8c88-d289887fbf8a" path="/var/lib/kubelet/pods/fdd3cba5-cf61-40cd-8c88-d289887fbf8a/volumes"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.208130 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-p6vph"]
Jan 30 00:14:56 crc kubenswrapper[4885]: E0130 00:14:56.208391 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" containerName="extract-utilities"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.208408 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" containerName="extract-utilities"
Jan 30 00:14:56 crc kubenswrapper[4885]: E0130 00:14:56.208418 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1ac97f9-b076-40c9-80fc-a2f6111d313b" containerName="registry-server"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.208425 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1ac97f9-b076-40c9-80fc-a2f6111d313b" containerName="registry-server"
Jan 30 00:14:56 crc kubenswrapper[4885]: E0130 00:14:56.208435 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faedfaad-6883-471e-9a4e-d15cc6b969d7" containerName="registry-server"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.208442 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="faedfaad-6883-471e-9a4e-d15cc6b969d7" containerName="registry-server"
Jan 30 00:14:56 crc kubenswrapper[4885]: E0130 00:14:56.208453 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdd3cba5-cf61-40cd-8c88-d289887fbf8a" containerName="registry-server"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.208460 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdd3cba5-cf61-40cd-8c88-d289887fbf8a" containerName="registry-server"
Jan 30 00:14:56 crc kubenswrapper[4885]: E0130 00:14:56.208473 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdd3cba5-cf61-40cd-8c88-d289887fbf8a" containerName="extract-content"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.208480 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdd3cba5-cf61-40cd-8c88-d289887fbf8a" containerName="extract-content"
Jan 30 00:14:56 crc kubenswrapper[4885]: E0130 00:14:56.208487 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdd3cba5-cf61-40cd-8c88-d289887fbf8a" containerName="extract-utilities"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.208494 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdd3cba5-cf61-40cd-8c88-d289887fbf8a" containerName="extract-utilities"
Jan 30 00:14:56 crc kubenswrapper[4885]: E0130 00:14:56.208504 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" containerName="extract-content"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.208510 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" containerName="extract-content"
Jan 30 00:14:56 crc kubenswrapper[4885]: E0130 00:14:56.208518 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" containerName="registry-server"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.208525 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" containerName="registry-server"
Jan 30 00:14:56 crc kubenswrapper[4885]: E0130 00:14:56.208535 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a345d711-84e3-47c8-a255-f833dfaca7fa" containerName="marketplace-operator"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.208542 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="a345d711-84e3-47c8-a255-f833dfaca7fa" containerName="marketplace-operator"
Jan 30 00:14:56 crc kubenswrapper[4885]: E0130 00:14:56.208553 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faedfaad-6883-471e-9a4e-d15cc6b969d7" containerName="extract-utilities"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.208561 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="faedfaad-6883-471e-9a4e-d15cc6b969d7" containerName="extract-utilities"
Jan 30 00:14:56 crc kubenswrapper[4885]: E0130 00:14:56.208568 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1ac97f9-b076-40c9-80fc-a2f6111d313b" containerName="extract-utilities"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.208575 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1ac97f9-b076-40c9-80fc-a2f6111d313b" containerName="extract-utilities"
Jan 30 00:14:56 crc kubenswrapper[4885]: E0130 00:14:56.208582 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1ac97f9-b076-40c9-80fc-a2f6111d313b" containerName="extract-content"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.208588 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1ac97f9-b076-40c9-80fc-a2f6111d313b" containerName="extract-content"
Jan 30 00:14:56 crc kubenswrapper[4885]: E0130 00:14:56.208597 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faedfaad-6883-471e-9a4e-d15cc6b969d7" containerName="extract-content"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.208602 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="faedfaad-6883-471e-9a4e-d15cc6b969d7" containerName="extract-content"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.208693 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdd3cba5-cf61-40cd-8c88-d289887fbf8a" containerName="registry-server"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.208705 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="faedfaad-6883-471e-9a4e-d15cc6b969d7" containerName="registry-server"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.208714 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="14fd8cd4-0faa-45da-a532-9528073cfe8e" containerName="registry-server"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.208723 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1ac97f9-b076-40c9-80fc-a2f6111d313b" containerName="registry-server"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.208733 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="a345d711-84e3-47c8-a255-f833dfaca7fa" containerName="marketplace-operator"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.208741 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="a345d711-84e3-47c8-a255-f833dfaca7fa" containerName="marketplace-operator"
Jan 30 00:14:56 crc kubenswrapper[4885]: E0130 00:14:56.208862 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a345d711-84e3-47c8-a255-f833dfaca7fa" containerName="marketplace-operator"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.208871 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="a345d711-84e3-47c8-a255-f833dfaca7fa" containerName="marketplace-operator"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.209531 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p6vph"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.212529 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.232899 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p6vph"]
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.315279 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-8628w"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.340798 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae480a27-919c-4a53-9a19-646d9af18fa9-catalog-content\") pod \"redhat-marketplace-p6vph\" (UID: \"ae480a27-919c-4a53-9a19-646d9af18fa9\") " pod="openshift-marketplace/redhat-marketplace-p6vph"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.340839 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae480a27-919c-4a53-9a19-646d9af18fa9-utilities\") pod \"redhat-marketplace-p6vph\" (UID: \"ae480a27-919c-4a53-9a19-646d9af18fa9\") " pod="openshift-marketplace/redhat-marketplace-p6vph"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.340874 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfsxq\" (UniqueName: \"kubernetes.io/projected/ae480a27-919c-4a53-9a19-646d9af18fa9-kube-api-access-pfsxq\") pod \"redhat-marketplace-p6vph\" (UID: \"ae480a27-919c-4a53-9a19-646d9af18fa9\") " pod="openshift-marketplace/redhat-marketplace-p6vph"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.442536 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae480a27-919c-4a53-9a19-646d9af18fa9-utilities\") pod \"redhat-marketplace-p6vph\" (UID: \"ae480a27-919c-4a53-9a19-646d9af18fa9\") " pod="openshift-marketplace/redhat-marketplace-p6vph"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.442592 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae480a27-919c-4a53-9a19-646d9af18fa9-catalog-content\") pod \"redhat-marketplace-p6vph\" (UID: \"ae480a27-919c-4a53-9a19-646d9af18fa9\") " pod="openshift-marketplace/redhat-marketplace-p6vph"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.442652 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfsxq\" (UniqueName: \"kubernetes.io/projected/ae480a27-919c-4a53-9a19-646d9af18fa9-kube-api-access-pfsxq\") pod \"redhat-marketplace-p6vph\" (UID: \"ae480a27-919c-4a53-9a19-646d9af18fa9\") " pod="openshift-marketplace/redhat-marketplace-p6vph"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.443254 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae480a27-919c-4a53-9a19-646d9af18fa9-utilities\") pod \"redhat-marketplace-p6vph\" (UID: \"ae480a27-919c-4a53-9a19-646d9af18fa9\") " pod="openshift-marketplace/redhat-marketplace-p6vph"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.443636 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae480a27-919c-4a53-9a19-646d9af18fa9-catalog-content\") pod \"redhat-marketplace-p6vph\" (UID: \"ae480a27-919c-4a53-9a19-646d9af18fa9\") " pod="openshift-marketplace/redhat-marketplace-p6vph"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.464387 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfsxq\" (UniqueName: \"kubernetes.io/projected/ae480a27-919c-4a53-9a19-646d9af18fa9-kube-api-access-pfsxq\") pod \"redhat-marketplace-p6vph\" (UID: \"ae480a27-919c-4a53-9a19-646d9af18fa9\") " pod="openshift-marketplace/redhat-marketplace-p6vph"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.555900 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p6vph"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.812588 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qj7dd"]
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.814647 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qj7dd"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.815623 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qj7dd"]
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.847989 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.952757 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8a63fc0-9c8d-4318-a69c-5d6463f40f0d-utilities\") pod \"certified-operators-qj7dd\" (UID: \"f8a63fc0-9c8d-4318-a69c-5d6463f40f0d\") " pod="openshift-marketplace/certified-operators-qj7dd"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.952852 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8a63fc0-9c8d-4318-a69c-5d6463f40f0d-catalog-content\") pod \"certified-operators-qj7dd\" (UID: \"f8a63fc0-9c8d-4318-a69c-5d6463f40f0d\") " pod="openshift-marketplace/certified-operators-qj7dd"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.952912 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8v65v\" (UniqueName: \"kubernetes.io/projected/f8a63fc0-9c8d-4318-a69c-5d6463f40f0d-kube-api-access-8v65v\") pod \"certified-operators-qj7dd\" (UID: \"f8a63fc0-9c8d-4318-a69c-5d6463f40f0d\") " pod="openshift-marketplace/certified-operators-qj7dd"
Jan 30 00:14:56 crc kubenswrapper[4885]: I0130 00:14:56.975850 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p6vph"]
Jan 30 00:14:56 crc kubenswrapper[4885]: W0130 00:14:56.995887 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podae480a27_919c_4a53_9a19_646d9af18fa9.slice/crio-bae29355a90aba06ea8e6396e9fca74c352a7b027b3bb457b70d28058ba62a9b WatchSource:0}: Error finding container bae29355a90aba06ea8e6396e9fca74c352a7b027b3bb457b70d28058ba62a9b: Status 404 returned error can't find the container with id bae29355a90aba06ea8e6396e9fca74c352a7b027b3bb457b70d28058ba62a9b
Jan 30 00:14:57 crc kubenswrapper[4885]: I0130 00:14:57.054030 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8a63fc0-9c8d-4318-a69c-5d6463f40f0d-utilities\") pod \"certified-operators-qj7dd\" (UID: \"f8a63fc0-9c8d-4318-a69c-5d6463f40f0d\") " pod="openshift-marketplace/certified-operators-qj7dd"
Jan 30 00:14:57 crc kubenswrapper[4885]: I0130 00:14:57.054093 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8a63fc0-9c8d-4318-a69c-5d6463f40f0d-catalog-content\") pod \"certified-operators-qj7dd\" (UID: \"f8a63fc0-9c8d-4318-a69c-5d6463f40f0d\") " pod="openshift-marketplace/certified-operators-qj7dd"
Jan 30 00:14:57 crc kubenswrapper[4885]: I0130 00:14:57.054136 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8v65v\" (UniqueName: \"kubernetes.io/projected/f8a63fc0-9c8d-4318-a69c-5d6463f40f0d-kube-api-access-8v65v\") pod \"certified-operators-qj7dd\" (UID: \"f8a63fc0-9c8d-4318-a69c-5d6463f40f0d\") " pod="openshift-marketplace/certified-operators-qj7dd"
Jan 30 00:14:57 crc kubenswrapper[4885]: I0130 00:14:57.054818 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8a63fc0-9c8d-4318-a69c-5d6463f40f0d-utilities\") pod \"certified-operators-qj7dd\" (UID: \"f8a63fc0-9c8d-4318-a69c-5d6463f40f0d\") " pod="openshift-marketplace/certified-operators-qj7dd"
Jan 30 00:14:57 crc kubenswrapper[4885]: I0130 00:14:57.055310 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8a63fc0-9c8d-4318-a69c-5d6463f40f0d-catalog-content\") pod \"certified-operators-qj7dd\" (UID: \"f8a63fc0-9c8d-4318-a69c-5d6463f40f0d\") " pod="openshift-marketplace/certified-operators-qj7dd"
Jan 30 00:14:57 crc kubenswrapper[4885]: I0130 00:14:57.080110 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8v65v\" (UniqueName: \"kubernetes.io/projected/f8a63fc0-9c8d-4318-a69c-5d6463f40f0d-kube-api-access-8v65v\") pod \"certified-operators-qj7dd\" (UID: \"f8a63fc0-9c8d-4318-a69c-5d6463f40f0d\") " pod="openshift-marketplace/certified-operators-qj7dd"
Jan 30 00:14:57 crc kubenswrapper[4885]: I0130 00:14:57.166993 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qj7dd"
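
Every catalog pod above mounts the same three volumes: two emptyDirs (utilities and catalog-content) and a projected service-account token. An emptyDir is backed by a directory under the pod's volumes dir, the same /var/lib/kubelet/pods/<UID>/volumes path that the orphan cleanup earlier removed. A small helper showing the standard on-disk layout; the directory naming is standard kubelet behavior, the helper itself is illustrative:

```go
// emptydir_path.go -- where an emptyDir volume for a pod lands on disk.
package main

import (
	"fmt"
	"path/filepath"
)

// emptyDirPath builds the backing directory for an emptyDir volume, e.g.
// /var/lib/kubelet/pods/ae480a27-.../volumes/kubernetes.io~empty-dir/catalog-content
func emptyDirPath(kubeletRoot, podUID, volName string) string {
	return filepath.Join(kubeletRoot, "pods", podUID, "volumes", "kubernetes.io~empty-dir", volName)
}

func main() {
	fmt.Println(emptyDirPath("/var/lib/kubelet", "ae480a27-919c-4a53-9a19-646d9af18fa9", "catalog-content"))
}
```
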
Jan 30 00:14:57 crc kubenswrapper[4885]: I0130 00:14:57.317114 4885 generic.go:334] "Generic (PLEG): container finished" podID="ae480a27-919c-4a53-9a19-646d9af18fa9" containerID="07c4a90c074c0eb03395eebc65a62b9823f86544ef38865dcf508ac830acb736" exitCode=0
Jan 30 00:14:57 crc kubenswrapper[4885]: I0130 00:14:57.317268 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p6vph" event={"ID":"ae480a27-919c-4a53-9a19-646d9af18fa9","Type":"ContainerDied","Data":"07c4a90c074c0eb03395eebc65a62b9823f86544ef38865dcf508ac830acb736"}
Jan 30 00:14:57 crc kubenswrapper[4885]: I0130 00:14:57.317476 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p6vph" event={"ID":"ae480a27-919c-4a53-9a19-646d9af18fa9","Type":"ContainerStarted","Data":"bae29355a90aba06ea8e6396e9fca74c352a7b027b3bb457b70d28058ba62a9b"}
Jan 30 00:14:57 crc kubenswrapper[4885]: I0130 00:14:57.566809 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qj7dd"]
Jan 30 00:14:57 crc kubenswrapper[4885]: W0130 00:14:57.579932 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf8a63fc0_9c8d_4318_a69c_5d6463f40f0d.slice/crio-5090c7cdd9f0b74bfb0aa4239b17887c78d99b5005d27795999c03a9a8b66301 WatchSource:0}: Error finding container 5090c7cdd9f0b74bfb0aa4239b17887c78d99b5005d27795999c03a9a8b66301: Status 404 returned error can't find the container with id 5090c7cdd9f0b74bfb0aa4239b17887c78d99b5005d27795999c03a9a8b66301
Jan 30 00:14:58 crc kubenswrapper[4885]: I0130 00:14:58.325210 4885 generic.go:334] "Generic (PLEG): container finished" podID="ae480a27-919c-4a53-9a19-646d9af18fa9" containerID="a7eb1416aba71efac95147e7ae10099b47fe49c36c8943415d55f8a311e51054" exitCode=0
Jan 30 00:14:58 crc kubenswrapper[4885]: I0130 00:14:58.325279 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p6vph" event={"ID":"ae480a27-919c-4a53-9a19-646d9af18fa9","Type":"ContainerDied","Data":"a7eb1416aba71efac95147e7ae10099b47fe49c36c8943415d55f8a311e51054"}
Jan 30 00:14:58 crc kubenswrapper[4885]: I0130 00:14:58.329910 4885 generic.go:334] "Generic (PLEG): container finished" podID="f8a63fc0-9c8d-4318-a69c-5d6463f40f0d" containerID="8b4bd3e8e3457840aabd3c22890fd998ebf074768bccf8fd4b8389e9755e24b3" exitCode=0
Jan 30 00:14:58 crc kubenswrapper[4885]: I0130 00:14:58.330989 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qj7dd" event={"ID":"f8a63fc0-9c8d-4318-a69c-5d6463f40f0d","Type":"ContainerDied","Data":"8b4bd3e8e3457840aabd3c22890fd998ebf074768bccf8fd4b8389e9755e24b3"}
Jan 30 00:14:58 crc kubenswrapper[4885]: I0130 00:14:58.331209 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qj7dd" event={"ID":"f8a63fc0-9c8d-4318-a69c-5d6463f40f0d","Type":"ContainerStarted","Data":"5090c7cdd9f0b74bfb0aa4239b17887c78d99b5005d27795999c03a9a8b66301"}
Jan 30 00:14:58 crc kubenswrapper[4885]: I0130 00:14:58.617584 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9vzzw"]
Jan 30 00:14:58 crc kubenswrapper[4885]: I0130 00:14:58.618930 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9vzzw"
Jan 30 00:14:58 crc kubenswrapper[4885]: I0130 00:14:58.622010 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Jan 30 00:14:58 crc kubenswrapper[4885]: I0130 00:14:58.625246 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9vzzw"]
Jan 30 00:14:58 crc kubenswrapper[4885]: I0130 00:14:58.783297 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145-utilities\") pod \"redhat-operators-9vzzw\" (UID: \"d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145\") " pod="openshift-marketplace/redhat-operators-9vzzw"
Jan 30 00:14:58 crc kubenswrapper[4885]: I0130 00:14:58.783345 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkscw\" (UniqueName: \"kubernetes.io/projected/d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145-kube-api-access-tkscw\") pod \"redhat-operators-9vzzw\" (UID: \"d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145\") " pod="openshift-marketplace/redhat-operators-9vzzw"
Jan 30 00:14:58 crc kubenswrapper[4885]: I0130 00:14:58.783391 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145-catalog-content\") pod \"redhat-operators-9vzzw\" (UID: \"d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145\") " pod="openshift-marketplace/redhat-operators-9vzzw"
Jan 30 00:14:58 crc kubenswrapper[4885]: I0130 00:14:58.885603 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145-catalog-content\") pod \"redhat-operators-9vzzw\" (UID: \"d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145\") " pod="openshift-marketplace/redhat-operators-9vzzw"
Jan 30 00:14:58 crc kubenswrapper[4885]: I0130 00:14:58.885795 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145-utilities\") pod \"redhat-operators-9vzzw\" (UID: \"d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145\") " pod="openshift-marketplace/redhat-operators-9vzzw"
Jan 30 00:14:58 crc kubenswrapper[4885]: I0130 00:14:58.885862 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkscw\" (UniqueName: \"kubernetes.io/projected/d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145-kube-api-access-tkscw\") pod \"redhat-operators-9vzzw\" (UID: \"d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145\") " pod="openshift-marketplace/redhat-operators-9vzzw"
Jan 30 00:14:58 crc kubenswrapper[4885]: I0130 00:14:58.886844 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145-catalog-content\") pod \"redhat-operators-9vzzw\" (UID: \"d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145\") " pod="openshift-marketplace/redhat-operators-9vzzw"
Jan 30 00:14:58 crc kubenswrapper[4885]: I0130 00:14:58.886989 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145-utilities\") pod \"redhat-operators-9vzzw\" (UID: \"d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145\") " pod="openshift-marketplace/redhat-operators-9vzzw"
Jan 30 00:14:58 crc kubenswrapper[4885]: I0130 00:14:58.910184 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkscw\" (UniqueName: \"kubernetes.io/projected/d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145-kube-api-access-tkscw\") pod \"redhat-operators-9vzzw\" (UID: \"d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145\") " pod="openshift-marketplace/redhat-operators-9vzzw"
Jan 30 00:14:58 crc kubenswrapper[4885]: I0130 00:14:58.941687 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9vzzw"
Jan 30 00:14:59 crc kubenswrapper[4885]: I0130 00:14:59.239494 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pbj45"]
Jan 30 00:14:59 crc kubenswrapper[4885]: I0130 00:14:59.242988 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pbj45"
Jan 30 00:14:59 crc kubenswrapper[4885]: I0130 00:14:59.247829 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9vzzw"]
Jan 30 00:14:59 crc kubenswrapper[4885]: W0130 00:14:59.249203 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd6b6b28d_5a6b_4ce2_b5e9_3834e7b0f145.slice/crio-b98c87d5f6e0e9be9b939cee5ae7bca6b190d1512e12637fb01bcdb97ff73208 WatchSource:0}: Error finding container b98c87d5f6e0e9be9b939cee5ae7bca6b190d1512e12637fb01bcdb97ff73208: Status 404 returned error can't find the container with id b98c87d5f6e0e9be9b939cee5ae7bca6b190d1512e12637fb01bcdb97ff73208
Jan 30 00:14:59 crc kubenswrapper[4885]: I0130 00:14:59.249330 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Jan 30 00:14:59 crc kubenswrapper[4885]: I0130 00:14:59.266023 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pbj45"]
Jan 30 00:14:59 crc kubenswrapper[4885]: I0130 00:14:59.292885 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4-utilities\") pod \"community-operators-pbj45\" (UID: \"bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4\") " pod="openshift-marketplace/community-operators-pbj45"
Jan 30 00:14:59 crc kubenswrapper[4885]: I0130 00:14:59.293044 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kmxl\" (UniqueName: \"kubernetes.io/projected/bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4-kube-api-access-8kmxl\") pod \"community-operators-pbj45\" (UID: \"bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4\") " pod="openshift-marketplace/community-operators-pbj45"
Jan 30 00:14:59 crc kubenswrapper[4885]: I0130 00:14:59.293135 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4-catalog-content\") pod \"community-operators-pbj45\" (UID: \"bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4\") " pod="openshift-marketplace/community-operators-pbj45"
Jan 30 00:14:59 crc kubenswrapper[4885]: I0130 00:14:59.344418 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9vzzw" event={"ID":"d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145","Type":"ContainerStarted","Data":"b98c87d5f6e0e9be9b939cee5ae7bca6b190d1512e12637fb01bcdb97ff73208"}
Jan 30 00:14:59 crc kubenswrapper[4885]: I0130 00:14:59.346655 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p6vph" event={"ID":"ae480a27-919c-4a53-9a19-646d9af18fa9","Type":"ContainerStarted","Data":"807c13aa1a36df42a8664ae33dc645990e9c8c921b1863ba3a9e42995e3e0f8b"}
Jan 30 00:14:59 crc kubenswrapper[4885]: I0130 00:14:59.395431 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4-utilities\") pod \"community-operators-pbj45\" (UID: \"bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4\") " pod="openshift-marketplace/community-operators-pbj45"
Jan 30 00:14:59 crc kubenswrapper[4885]: I0130 00:14:59.395723 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kmxl\" (UniqueName: \"kubernetes.io/projected/bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4-kube-api-access-8kmxl\") pod \"community-operators-pbj45\" (UID: \"bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4\") " pod="openshift-marketplace/community-operators-pbj45"
Jan 30 00:14:59 crc kubenswrapper[4885]: I0130 00:14:59.395984 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4-catalog-content\") pod \"community-operators-pbj45\" (UID: \"bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4\") " pod="openshift-marketplace/community-operators-pbj45"
Jan 30 00:14:59 crc kubenswrapper[4885]: I0130 00:14:59.396705 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4-utilities\") pod \"community-operators-pbj45\" (UID: \"bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4\") " pod="openshift-marketplace/community-operators-pbj45"
Jan 30 00:14:59 crc kubenswrapper[4885]: I0130 00:14:59.397170 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4-catalog-content\") pod \"community-operators-pbj45\" (UID: \"bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4\") " pod="openshift-marketplace/community-operators-pbj45"
Jan 30 00:14:59 crc kubenswrapper[4885]: I0130 00:14:59.421069 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kmxl\" (UniqueName: \"kubernetes.io/projected/bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4-kube-api-access-8kmxl\") pod \"community-operators-pbj45\" (UID: \"bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4\") " pod="openshift-marketplace/community-operators-pbj45"
Jan 30 00:14:59 crc kubenswrapper[4885]: I0130 00:14:59.606863 4885 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/community-operators-pbj45" Jan 30 00:14:59 crc kubenswrapper[4885]: I0130 00:14:59.821322 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-p6vph" podStartSLOduration=2.422792875 podStartE2EDuration="3.821300573s" podCreationTimestamp="2026-01-30 00:14:56 +0000 UTC" firstStartedPulling="2026-01-30 00:14:57.320761713 +0000 UTC m=+383.912233461" lastFinishedPulling="2026-01-30 00:14:58.719269401 +0000 UTC m=+385.310741159" observedRunningTime="2026-01-30 00:14:59.378099076 +0000 UTC m=+385.969570824" watchObservedRunningTime="2026-01-30 00:14:59.821300573 +0000 UTC m=+386.412772321" Jan 30 00:14:59 crc kubenswrapper[4885]: I0130 00:14:59.824974 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pbj45"] Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.174444 4885 patch_prober.go:28] interesting pod/machine-config-daemon-bmd5j container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.174803 4885 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.181324 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29495535-tzwq2"] Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.182124 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29495535-tzwq2" Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.183797 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29495535-tzwq2"] Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.184550 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.184834 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.307328 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7bd7617d-7acd-465b-add2-f703a4d3f8e4-config-volume\") pod \"collect-profiles-29495535-tzwq2\" (UID: \"7bd7617d-7acd-465b-add2-f703a4d3f8e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495535-tzwq2" Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.307378 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zd7c\" (UniqueName: \"kubernetes.io/projected/7bd7617d-7acd-465b-add2-f703a4d3f8e4-kube-api-access-9zd7c\") pod \"collect-profiles-29495535-tzwq2\" (UID: \"7bd7617d-7acd-465b-add2-f703a4d3f8e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495535-tzwq2" Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.307428 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7bd7617d-7acd-465b-add2-f703a4d3f8e4-secret-volume\") pod \"collect-profiles-29495535-tzwq2\" (UID: \"7bd7617d-7acd-465b-add2-f703a4d3f8e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495535-tzwq2" Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.354689 4885 generic.go:334] "Generic (PLEG): container finished" podID="f8a63fc0-9c8d-4318-a69c-5d6463f40f0d" containerID="9daba3035f3b9fdc640ff01c9af7d37653266eabf5987c630b9f6cdcff0ce534" exitCode=0 Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.354751 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qj7dd" event={"ID":"f8a63fc0-9c8d-4318-a69c-5d6463f40f0d","Type":"ContainerDied","Data":"9daba3035f3b9fdc640ff01c9af7d37653266eabf5987c630b9f6cdcff0ce534"} Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.356251 4885 generic.go:334] "Generic (PLEG): container finished" podID="d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145" containerID="d0d9a59d250f65d81284f89761eaea584353343afc7ac76b2de8302e5ec13267" exitCode=0 Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.356307 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9vzzw" event={"ID":"d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145","Type":"ContainerDied","Data":"d0d9a59d250f65d81284f89761eaea584353343afc7ac76b2de8302e5ec13267"} Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.358110 4885 generic.go:334] "Generic (PLEG): container finished" podID="bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4" containerID="04c8a7f3ea3b68a56d4935fdd2c0a248300271c82e6f3b58b4e2e3fe7b4f29c5" exitCode=0 Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.358142 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-pbj45" event={"ID":"bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4","Type":"ContainerDied","Data":"04c8a7f3ea3b68a56d4935fdd2c0a248300271c82e6f3b58b4e2e3fe7b4f29c5"} Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.358172 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pbj45" event={"ID":"bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4","Type":"ContainerStarted","Data":"5443ad236d34c970af055c92032f8c77f09f981c4e159d50bfb80202b6c9347f"} Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.410001 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zd7c\" (UniqueName: \"kubernetes.io/projected/7bd7617d-7acd-465b-add2-f703a4d3f8e4-kube-api-access-9zd7c\") pod \"collect-profiles-29495535-tzwq2\" (UID: \"7bd7617d-7acd-465b-add2-f703a4d3f8e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495535-tzwq2" Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.410056 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7bd7617d-7acd-465b-add2-f703a4d3f8e4-config-volume\") pod \"collect-profiles-29495535-tzwq2\" (UID: \"7bd7617d-7acd-465b-add2-f703a4d3f8e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495535-tzwq2" Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.410093 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7bd7617d-7acd-465b-add2-f703a4d3f8e4-secret-volume\") pod \"collect-profiles-29495535-tzwq2\" (UID: \"7bd7617d-7acd-465b-add2-f703a4d3f8e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495535-tzwq2" Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.410867 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7bd7617d-7acd-465b-add2-f703a4d3f8e4-config-volume\") pod \"collect-profiles-29495535-tzwq2\" (UID: \"7bd7617d-7acd-465b-add2-f703a4d3f8e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495535-tzwq2" Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.421593 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7bd7617d-7acd-465b-add2-f703a4d3f8e4-secret-volume\") pod \"collect-profiles-29495535-tzwq2\" (UID: \"7bd7617d-7acd-465b-add2-f703a4d3f8e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495535-tzwq2" Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.434367 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zd7c\" (UniqueName: \"kubernetes.io/projected/7bd7617d-7acd-465b-add2-f703a4d3f8e4-kube-api-access-9zd7c\") pod \"collect-profiles-29495535-tzwq2\" (UID: \"7bd7617d-7acd-465b-add2-f703a4d3f8e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29495535-tzwq2" Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.506398 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29495535-tzwq2" Jan 30 00:15:00 crc kubenswrapper[4885]: I0130 00:15:00.716195 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29495535-tzwq2"] Jan 30 00:15:00 crc kubenswrapper[4885]: W0130 00:15:00.729041 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7bd7617d_7acd_465b_add2_f703a4d3f8e4.slice/crio-723a6cd087803f0e1f9b4043d0ab61f098185809e48bd69d460be8cb0b774f17 WatchSource:0}: Error finding container 723a6cd087803f0e1f9b4043d0ab61f098185809e48bd69d460be8cb0b774f17: Status 404 returned error can't find the container with id 723a6cd087803f0e1f9b4043d0ab61f098185809e48bd69d460be8cb0b774f17 Jan 30 00:15:01 crc kubenswrapper[4885]: I0130 00:15:01.366675 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qj7dd" event={"ID":"f8a63fc0-9c8d-4318-a69c-5d6463f40f0d","Type":"ContainerStarted","Data":"1788dd90477fff1f33495ac83b81b0ccdfeba9997e74c60212303c7de55a51e2"} Jan 30 00:15:01 crc kubenswrapper[4885]: I0130 00:15:01.369453 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9vzzw" event={"ID":"d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145","Type":"ContainerStarted","Data":"7b86f4b20387f9cfa259a768f247ab2018b9acb99f31550012efbb8bf29d5260"} Jan 30 00:15:01 crc kubenswrapper[4885]: I0130 00:15:01.372059 4885 generic.go:334] "Generic (PLEG): container finished" podID="7bd7617d-7acd-465b-add2-f703a4d3f8e4" containerID="9dcf015bad06dc1d52bc31953fb430e7da2c5dda9cc0e2c091c5a348a27b2bf5" exitCode=0 Jan 30 00:15:01 crc kubenswrapper[4885]: I0130 00:15:01.372203 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29495535-tzwq2" event={"ID":"7bd7617d-7acd-465b-add2-f703a4d3f8e4","Type":"ContainerDied","Data":"9dcf015bad06dc1d52bc31953fb430e7da2c5dda9cc0e2c091c5a348a27b2bf5"} Jan 30 00:15:01 crc kubenswrapper[4885]: I0130 00:15:01.372255 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29495535-tzwq2" event={"ID":"7bd7617d-7acd-465b-add2-f703a4d3f8e4","Type":"ContainerStarted","Data":"723a6cd087803f0e1f9b4043d0ab61f098185809e48bd69d460be8cb0b774f17"} Jan 30 00:15:01 crc kubenswrapper[4885]: I0130 00:15:01.375300 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pbj45" event={"ID":"bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4","Type":"ContainerStarted","Data":"601cbd03f5a5f0c77fec8fe2b88dce74520034e08d83df90acab1543ad61873a"} Jan 30 00:15:01 crc kubenswrapper[4885]: I0130 00:15:01.395210 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qj7dd" podStartSLOduration=2.953122064 podStartE2EDuration="5.395183243s" podCreationTimestamp="2026-01-30 00:14:56 +0000 UTC" firstStartedPulling="2026-01-30 00:14:58.337641009 +0000 UTC m=+384.929112777" lastFinishedPulling="2026-01-30 00:15:00.779702208 +0000 UTC m=+387.371173956" observedRunningTime="2026-01-30 00:15:01.391462772 +0000 UTC m=+387.982934520" watchObservedRunningTime="2026-01-30 00:15:01.395183243 +0000 UTC m=+387.986654981" Jan 30 00:15:02 crc kubenswrapper[4885]: I0130 00:15:02.383267 4885 generic.go:334] "Generic (PLEG): container finished" podID="bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4" 
containerID="601cbd03f5a5f0c77fec8fe2b88dce74520034e08d83df90acab1543ad61873a" exitCode=0 Jan 30 00:15:02 crc kubenswrapper[4885]: I0130 00:15:02.383391 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pbj45" event={"ID":"bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4","Type":"ContainerDied","Data":"601cbd03f5a5f0c77fec8fe2b88dce74520034e08d83df90acab1543ad61873a"} Jan 30 00:15:02 crc kubenswrapper[4885]: I0130 00:15:02.393272 4885 generic.go:334] "Generic (PLEG): container finished" podID="d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145" containerID="7b86f4b20387f9cfa259a768f247ab2018b9acb99f31550012efbb8bf29d5260" exitCode=0 Jan 30 00:15:02 crc kubenswrapper[4885]: I0130 00:15:02.393360 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9vzzw" event={"ID":"d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145","Type":"ContainerDied","Data":"7b86f4b20387f9cfa259a768f247ab2018b9acb99f31550012efbb8bf29d5260"} Jan 30 00:15:02 crc kubenswrapper[4885]: I0130 00:15:02.689328 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29495535-tzwq2" Jan 30 00:15:02 crc kubenswrapper[4885]: I0130 00:15:02.849406 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9zd7c\" (UniqueName: \"kubernetes.io/projected/7bd7617d-7acd-465b-add2-f703a4d3f8e4-kube-api-access-9zd7c\") pod \"7bd7617d-7acd-465b-add2-f703a4d3f8e4\" (UID: \"7bd7617d-7acd-465b-add2-f703a4d3f8e4\") " Jan 30 00:15:02 crc kubenswrapper[4885]: I0130 00:15:02.850241 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7bd7617d-7acd-465b-add2-f703a4d3f8e4-config-volume\") pod \"7bd7617d-7acd-465b-add2-f703a4d3f8e4\" (UID: \"7bd7617d-7acd-465b-add2-f703a4d3f8e4\") " Jan 30 00:15:02 crc kubenswrapper[4885]: I0130 00:15:02.850317 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7bd7617d-7acd-465b-add2-f703a4d3f8e4-secret-volume\") pod \"7bd7617d-7acd-465b-add2-f703a4d3f8e4\" (UID: \"7bd7617d-7acd-465b-add2-f703a4d3f8e4\") " Jan 30 00:15:02 crc kubenswrapper[4885]: I0130 00:15:02.850938 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bd7617d-7acd-465b-add2-f703a4d3f8e4-config-volume" (OuterVolumeSpecName: "config-volume") pod "7bd7617d-7acd-465b-add2-f703a4d3f8e4" (UID: "7bd7617d-7acd-465b-add2-f703a4d3f8e4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:15:02 crc kubenswrapper[4885]: I0130 00:15:02.856218 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bd7617d-7acd-465b-add2-f703a4d3f8e4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7bd7617d-7acd-465b-add2-f703a4d3f8e4" (UID: "7bd7617d-7acd-465b-add2-f703a4d3f8e4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:15:02 crc kubenswrapper[4885]: I0130 00:15:02.856262 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bd7617d-7acd-465b-add2-f703a4d3f8e4-kube-api-access-9zd7c" (OuterVolumeSpecName: "kube-api-access-9zd7c") pod "7bd7617d-7acd-465b-add2-f703a4d3f8e4" (UID: "7bd7617d-7acd-465b-add2-f703a4d3f8e4"). InnerVolumeSpecName "kube-api-access-9zd7c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:15:02 crc kubenswrapper[4885]: I0130 00:15:02.953386 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9zd7c\" (UniqueName: \"kubernetes.io/projected/7bd7617d-7acd-465b-add2-f703a4d3f8e4-kube-api-access-9zd7c\") on node \"crc\" DevicePath \"\"" Jan 30 00:15:02 crc kubenswrapper[4885]: I0130 00:15:02.953442 4885 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7bd7617d-7acd-465b-add2-f703a4d3f8e4-config-volume\") on node \"crc\" DevicePath \"\"" Jan 30 00:15:02 crc kubenswrapper[4885]: I0130 00:15:02.953453 4885 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7bd7617d-7acd-465b-add2-f703a4d3f8e4-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 30 00:15:03 crc kubenswrapper[4885]: I0130 00:15:03.400840 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9vzzw" event={"ID":"d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145","Type":"ContainerStarted","Data":"fb969dd6e65e3f3e2e3ca1c006ec00a8dc94dc87ce24c295d8ba487bf09b1713"} Jan 30 00:15:03 crc kubenswrapper[4885]: I0130 00:15:03.403443 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29495535-tzwq2" Jan 30 00:15:03 crc kubenswrapper[4885]: I0130 00:15:03.403941 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29495535-tzwq2" event={"ID":"7bd7617d-7acd-465b-add2-f703a4d3f8e4","Type":"ContainerDied","Data":"723a6cd087803f0e1f9b4043d0ab61f098185809e48bd69d460be8cb0b774f17"} Jan 30 00:15:03 crc kubenswrapper[4885]: I0130 00:15:03.404118 4885 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="723a6cd087803f0e1f9b4043d0ab61f098185809e48bd69d460be8cb0b774f17" Jan 30 00:15:03 crc kubenswrapper[4885]: I0130 00:15:03.407400 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pbj45" event={"ID":"bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4","Type":"ContainerStarted","Data":"b95cbaecd8cb1bed8ba2a87fc6df4a55d24e9f4bb6b0f466863d22e1cfcd439b"} Jan 30 00:15:03 crc kubenswrapper[4885]: I0130 00:15:03.418998 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9vzzw" podStartSLOduration=2.865124121 podStartE2EDuration="5.418963232s" podCreationTimestamp="2026-01-30 00:14:58 +0000 UTC" firstStartedPulling="2026-01-30 00:15:00.358682834 +0000 UTC m=+386.950154582" lastFinishedPulling="2026-01-30 00:15:02.912521945 +0000 UTC m=+389.503993693" observedRunningTime="2026-01-30 00:15:03.417367829 +0000 UTC m=+390.008839577" watchObservedRunningTime="2026-01-30 00:15:03.418963232 +0000 UTC m=+390.010434980" Jan 30 00:15:03 crc kubenswrapper[4885]: I0130 00:15:03.439851 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pbj45" podStartSLOduration=2.00324796 podStartE2EDuration="4.43982441s" podCreationTimestamp="2026-01-30 00:14:59 +0000 UTC" firstStartedPulling="2026-01-30 00:15:00.360146584 +0000 UTC m=+386.951618332" lastFinishedPulling="2026-01-30 00:15:02.796723034 +0000 UTC m=+389.388194782" observedRunningTime="2026-01-30 00:15:03.43797587 +0000 UTC m=+390.029447628" watchObservedRunningTime="2026-01-30 00:15:03.43982441 +0000 UTC m=+390.031296158" Jan 30 
00:15:04 crc kubenswrapper[4885]: I0130 00:15:04.737844 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-fsw8p" Jan 30 00:15:04 crc kubenswrapper[4885]: I0130 00:15:04.792950 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-dnhsm"] Jan 30 00:15:06 crc kubenswrapper[4885]: I0130 00:15:06.557026 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-p6vph" Jan 30 00:15:06 crc kubenswrapper[4885]: I0130 00:15:06.557086 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-p6vph" Jan 30 00:15:06 crc kubenswrapper[4885]: I0130 00:15:06.632357 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-p6vph" Jan 30 00:15:07 crc kubenswrapper[4885]: I0130 00:15:07.167884 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qj7dd" Jan 30 00:15:07 crc kubenswrapper[4885]: I0130 00:15:07.167924 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qj7dd" Jan 30 00:15:07 crc kubenswrapper[4885]: I0130 00:15:07.229520 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qj7dd" Jan 30 00:15:07 crc kubenswrapper[4885]: I0130 00:15:07.479046 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-p6vph" Jan 30 00:15:07 crc kubenswrapper[4885]: I0130 00:15:07.503497 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qj7dd" Jan 30 00:15:08 crc kubenswrapper[4885]: I0130 00:15:08.942641 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9vzzw" Jan 30 00:15:08 crc kubenswrapper[4885]: I0130 00:15:08.943052 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9vzzw" Jan 30 00:15:09 crc kubenswrapper[4885]: I0130 00:15:09.607612 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pbj45" Jan 30 00:15:09 crc kubenswrapper[4885]: I0130 00:15:09.607694 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pbj45" Jan 30 00:15:09 crc kubenswrapper[4885]: I0130 00:15:09.672004 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pbj45" Jan 30 00:15:09 crc kubenswrapper[4885]: I0130 00:15:09.995341 4885 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-9vzzw" podUID="d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145" containerName="registry-server" probeResult="failure" output=< Jan 30 00:15:09 crc kubenswrapper[4885]: timeout: failed to connect service ":50051" within 1s Jan 30 00:15:09 crc kubenswrapper[4885]: > Jan 30 00:15:10 crc kubenswrapper[4885]: I0130 00:15:10.491338 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pbj45" Jan 30 00:15:19 crc kubenswrapper[4885]: I0130 00:15:19.025117 4885 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9vzzw" Jan 30 00:15:19 crc kubenswrapper[4885]: I0130 00:15:19.077767 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9vzzw" Jan 30 00:15:29 crc kubenswrapper[4885]: I0130 00:15:29.842140 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" podUID="d3f0f887-4427-41fa-a495-470f6a1da8ae" containerName="registry" containerID="cri-o://0e366299c015f6cec7f94bcbce31c19636871c97b921745b53168e4f85c0eda8" gracePeriod=30 Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.144354 4885 patch_prober.go:28] interesting pod/machine-config-daemon-bmd5j container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.145254 4885 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.149381 4885 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.150051 4885 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d0446ae30b16adbc17a7c39638b35405117acdb3790f74fd73290c8db7c38e19"} pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.150119 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" containerID="cri-o://d0446ae30b16adbc17a7c39638b35405117acdb3790f74fd73290c8db7c38e19" gracePeriod=600 Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.242493 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.405592 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d3f0f887-4427-41fa-a495-470f6a1da8ae-trusted-ca\") pod \"d3f0f887-4427-41fa-a495-470f6a1da8ae\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.406119 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d3f0f887-4427-41fa-a495-470f6a1da8ae-ca-trust-extracted\") pod \"d3f0f887-4427-41fa-a495-470f6a1da8ae\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.406256 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d3f0f887-4427-41fa-a495-470f6a1da8ae-bound-sa-token\") pod \"d3f0f887-4427-41fa-a495-470f6a1da8ae\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.406250 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3f0f887-4427-41fa-a495-470f6a1da8ae-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "d3f0f887-4427-41fa-a495-470f6a1da8ae" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.406286 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d3f0f887-4427-41fa-a495-470f6a1da8ae-registry-certificates\") pod \"d3f0f887-4427-41fa-a495-470f6a1da8ae\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.406343 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvhbh\" (UniqueName: \"kubernetes.io/projected/d3f0f887-4427-41fa-a495-470f6a1da8ae-kube-api-access-gvhbh\") pod \"d3f0f887-4427-41fa-a495-470f6a1da8ae\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.406364 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d3f0f887-4427-41fa-a495-470f6a1da8ae-registry-tls\") pod \"d3f0f887-4427-41fa-a495-470f6a1da8ae\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.406405 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d3f0f887-4427-41fa-a495-470f6a1da8ae-installation-pull-secrets\") pod \"d3f0f887-4427-41fa-a495-470f6a1da8ae\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.406613 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"d3f0f887-4427-41fa-a495-470f6a1da8ae\" (UID: \"d3f0f887-4427-41fa-a495-470f6a1da8ae\") " Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.406884 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/d3f0f887-4427-41fa-a495-470f6a1da8ae-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "d3f0f887-4427-41fa-a495-470f6a1da8ae" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.406915 4885 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d3f0f887-4427-41fa-a495-470f6a1da8ae-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.412223 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3f0f887-4427-41fa-a495-470f6a1da8ae-kube-api-access-gvhbh" (OuterVolumeSpecName: "kube-api-access-gvhbh") pod "d3f0f887-4427-41fa-a495-470f6a1da8ae" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae"). InnerVolumeSpecName "kube-api-access-gvhbh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.412463 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3f0f887-4427-41fa-a495-470f6a1da8ae-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "d3f0f887-4427-41fa-a495-470f6a1da8ae" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.412563 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3f0f887-4427-41fa-a495-470f6a1da8ae-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "d3f0f887-4427-41fa-a495-470f6a1da8ae" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.412644 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3f0f887-4427-41fa-a495-470f6a1da8ae-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "d3f0f887-4427-41fa-a495-470f6a1da8ae" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.418605 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "d3f0f887-4427-41fa-a495-470f6a1da8ae" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.427459 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3f0f887-4427-41fa-a495-470f6a1da8ae-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "d3f0f887-4427-41fa-a495-470f6a1da8ae" (UID: "d3f0f887-4427-41fa-a495-470f6a1da8ae"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.508811 4885 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d3f0f887-4427-41fa-a495-470f6a1da8ae-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.508866 4885 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d3f0f887-4427-41fa-a495-470f6a1da8ae-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.508880 4885 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d3f0f887-4427-41fa-a495-470f6a1da8ae-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.508897 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvhbh\" (UniqueName: \"kubernetes.io/projected/d3f0f887-4427-41fa-a495-470f6a1da8ae-kube-api-access-gvhbh\") on node \"crc\" DevicePath \"\"" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.508912 4885 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d3f0f887-4427-41fa-a495-470f6a1da8ae-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.508926 4885 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d3f0f887-4427-41fa-a495-470f6a1da8ae-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.582450 4885 generic.go:334] "Generic (PLEG): container finished" podID="41b99e9c-eadb-404c-9596-1b102ac85157" containerID="d0446ae30b16adbc17a7c39638b35405117acdb3790f74fd73290c8db7c38e19" exitCode=0 Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.582551 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" event={"ID":"41b99e9c-eadb-404c-9596-1b102ac85157","Type":"ContainerDied","Data":"d0446ae30b16adbc17a7c39638b35405117acdb3790f74fd73290c8db7c38e19"} Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.582625 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" event={"ID":"41b99e9c-eadb-404c-9596-1b102ac85157","Type":"ContainerStarted","Data":"b2dbc872a4bd6c3cbe912a7eb4ccfad14a5c3be1740b07b8e8ff70733d344978"} Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.582659 4885 scope.go:117] "RemoveContainer" containerID="f91fa33d88d5a67ed405bc7ba74bd1cfb18d290b5bb8dfba82ce6e753519d339" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.585841 4885 generic.go:334] "Generic (PLEG): container finished" podID="d3f0f887-4427-41fa-a495-470f6a1da8ae" containerID="0e366299c015f6cec7f94bcbce31c19636871c97b921745b53168e4f85c0eda8" exitCode=0 Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.585882 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" event={"ID":"d3f0f887-4427-41fa-a495-470f6a1da8ae","Type":"ContainerDied","Data":"0e366299c015f6cec7f94bcbce31c19636871c97b921745b53168e4f85c0eda8"} Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.585908 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.585915 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-dnhsm" event={"ID":"d3f0f887-4427-41fa-a495-470f6a1da8ae","Type":"ContainerDied","Data":"6fc7c8693bc21d76ef3115df9816e0361f3acd338a6e37cc4ddafe3406cbadf9"} Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.622685 4885 scope.go:117] "RemoveContainer" containerID="0e366299c015f6cec7f94bcbce31c19636871c97b921745b53168e4f85c0eda8" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.624880 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-dnhsm"] Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.631492 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-dnhsm"] Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.653650 4885 scope.go:117] "RemoveContainer" containerID="0e366299c015f6cec7f94bcbce31c19636871c97b921745b53168e4f85c0eda8" Jan 30 00:15:30 crc kubenswrapper[4885]: E0130 00:15:30.654500 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e366299c015f6cec7f94bcbce31c19636871c97b921745b53168e4f85c0eda8\": container with ID starting with 0e366299c015f6cec7f94bcbce31c19636871c97b921745b53168e4f85c0eda8 not found: ID does not exist" containerID="0e366299c015f6cec7f94bcbce31c19636871c97b921745b53168e4f85c0eda8" Jan 30 00:15:30 crc kubenswrapper[4885]: I0130 00:15:30.654586 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e366299c015f6cec7f94bcbce31c19636871c97b921745b53168e4f85c0eda8"} err="failed to get container status \"0e366299c015f6cec7f94bcbce31c19636871c97b921745b53168e4f85c0eda8\": rpc error: code = NotFound desc = could not find container \"0e366299c015f6cec7f94bcbce31c19636871c97b921745b53168e4f85c0eda8\": container with ID starting with 0e366299c015f6cec7f94bcbce31c19636871c97b921745b53168e4f85c0eda8 not found: ID does not exist" Jan 30 00:15:32 crc kubenswrapper[4885]: I0130 00:15:32.150300 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3f0f887-4427-41fa-a495-470f6a1da8ae" path="/var/lib/kubelet/pods/d3f0f887-4427-41fa-a495-470f6a1da8ae/volumes" Jan 30 00:17:30 crc kubenswrapper[4885]: I0130 00:17:30.144554 4885 patch_prober.go:28] interesting pod/machine-config-daemon-bmd5j container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 00:17:30 crc kubenswrapper[4885]: I0130 00:17:30.145633 4885 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 00:17:34 crc kubenswrapper[4885]: I0130 00:17:34.437198 4885 scope.go:117] "RemoveContainer" containerID="016c6504227cf26925de196719eff21c23226afb0d56ab2fcfe8fbb824bf2bc0" Jan 30 00:18:00 crc kubenswrapper[4885]: I0130 00:18:00.144432 4885 patch_prober.go:28] interesting pod/machine-config-daemon-bmd5j container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 00:18:00 crc kubenswrapper[4885]: I0130 00:18:00.145156 4885 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 00:18:30 crc kubenswrapper[4885]: I0130 00:18:30.143910 4885 patch_prober.go:28] interesting pod/machine-config-daemon-bmd5j container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 00:18:30 crc kubenswrapper[4885]: I0130 00:18:30.144655 4885 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 00:18:30 crc kubenswrapper[4885]: I0130 00:18:30.153471 4885 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" Jan 30 00:18:30 crc kubenswrapper[4885]: I0130 00:18:30.154581 4885 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b2dbc872a4bd6c3cbe912a7eb4ccfad14a5c3be1740b07b8e8ff70733d344978"} pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 00:18:30 crc kubenswrapper[4885]: I0130 00:18:30.154718 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" containerID="cri-o://b2dbc872a4bd6c3cbe912a7eb4ccfad14a5c3be1740b07b8e8ff70733d344978" gracePeriod=600 Jan 30 00:18:30 crc kubenswrapper[4885]: I0130 00:18:30.851020 4885 generic.go:334] "Generic (PLEG): container finished" podID="41b99e9c-eadb-404c-9596-1b102ac85157" containerID="b2dbc872a4bd6c3cbe912a7eb4ccfad14a5c3be1740b07b8e8ff70733d344978" exitCode=0 Jan 30 00:18:30 crc kubenswrapper[4885]: I0130 00:18:30.851108 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" event={"ID":"41b99e9c-eadb-404c-9596-1b102ac85157","Type":"ContainerDied","Data":"b2dbc872a4bd6c3cbe912a7eb4ccfad14a5c3be1740b07b8e8ff70733d344978"} Jan 30 00:18:30 crc kubenswrapper[4885]: I0130 00:18:30.851382 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" event={"ID":"41b99e9c-eadb-404c-9596-1b102ac85157","Type":"ContainerStarted","Data":"734caa87bbc7a31bd529920dc9d2ea498fed57fb22424523a2081de46284edd7"} Jan 30 00:18:30 crc kubenswrapper[4885]: I0130 00:18:30.851411 4885 scope.go:117] "RemoveContainer" containerID="d0446ae30b16adbc17a7c39638b35405117acdb3790f74fd73290c8db7c38e19" Jan 30 00:18:34 crc kubenswrapper[4885]: I0130 00:18:34.470924 4885 scope.go:117] 
"RemoveContainer" containerID="5ec6cd48b4011c36d3be8a7b7465fbd07428b401164c87ac6f721b3bf71b1e82" Jan 30 00:18:34 crc kubenswrapper[4885]: I0130 00:18:34.515022 4885 scope.go:117] "RemoveContainer" containerID="9e98b1ac523c55774442e0b0331d86559c525b25f5ac9809138b6ddcaecea561" Jan 30 00:19:13 crc kubenswrapper[4885]: I0130 00:19:13.922750 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-hwpvs"] Jan 30 00:19:13 crc kubenswrapper[4885]: I0130 00:19:13.924584 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovn-controller" containerID="cri-o://e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8" gracePeriod=30 Jan 30 00:19:13 crc kubenswrapper[4885]: I0130 00:19:13.924674 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="nbdb" containerID="cri-o://9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e" gracePeriod=30 Jan 30 00:19:13 crc kubenswrapper[4885]: I0130 00:19:13.924983 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="sbdb" containerID="cri-o://4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc" gracePeriod=30 Jan 30 00:19:13 crc kubenswrapper[4885]: I0130 00:19:13.925182 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="kube-rbac-proxy-node" containerID="cri-o://5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b" gracePeriod=30 Jan 30 00:19:13 crc kubenswrapper[4885]: I0130 00:19:13.925294 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519" gracePeriod=30 Jan 30 00:19:13 crc kubenswrapper[4885]: I0130 00:19:13.925283 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="northd" containerID="cri-o://ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5" gracePeriod=30 Jan 30 00:19:13 crc kubenswrapper[4885]: I0130 00:19:13.925253 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovn-acl-logging" containerID="cri-o://b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24" gracePeriod=30 Jan 30 00:19:13 crc kubenswrapper[4885]: I0130 00:19:13.981086 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovnkube-controller" containerID="cri-o://4c863c3754e027f805460ff2c446494df4b4ed59f4d80a94d2dd7d2a276fdeda" gracePeriod=30 Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.137304 4885 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-multus_multus-xmv9h_3f11e547-11fd-417a-be4a-e4f37d8e7839/kube-multus/2.log" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.138162 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xmv9h_3f11e547-11fd-417a-be4a-e4f37d8e7839/kube-multus/1.log" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.138194 4885 generic.go:334] "Generic (PLEG): container finished" podID="3f11e547-11fd-417a-be4a-e4f37d8e7839" containerID="2681aea94aa236ce8fbf8e060c1ff8dd558f4a63c3b6a0382c7b9f70ffa15280" exitCode=2 Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.138239 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xmv9h" event={"ID":"3f11e547-11fd-417a-be4a-e4f37d8e7839","Type":"ContainerDied","Data":"2681aea94aa236ce8fbf8e060c1ff8dd558f4a63c3b6a0382c7b9f70ffa15280"} Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.138278 4885 scope.go:117] "RemoveContainer" containerID="d609ef0f63a56e8d8c04c39f0d20715f8c0e42915ccf9a7fdfd1bec2a35dadc9" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.138737 4885 scope.go:117] "RemoveContainer" containerID="2681aea94aa236ce8fbf8e060c1ff8dd558f4a63c3b6a0382c7b9f70ffa15280" Jan 30 00:19:14 crc kubenswrapper[4885]: E0130 00:19:14.138998 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-xmv9h_openshift-multus(3f11e547-11fd-417a-be4a-e4f37d8e7839)\"" pod="openshift-multus/multus-xmv9h" podUID="3f11e547-11fd-417a-be4a-e4f37d8e7839" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.147449 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hwpvs_147e5e96-db98-498f-b4a4-927d73cb5db5/ovnkube-controller/3.log" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.150545 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hwpvs_147e5e96-db98-498f-b4a4-927d73cb5db5/ovn-acl-logging/0.log" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.150969 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hwpvs_147e5e96-db98-498f-b4a4-927d73cb5db5/ovn-controller/0.log" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.157208 4885 generic.go:334] "Generic (PLEG): container finished" podID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerID="4c863c3754e027f805460ff2c446494df4b4ed59f4d80a94d2dd7d2a276fdeda" exitCode=0 Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.157236 4885 generic.go:334] "Generic (PLEG): container finished" podID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerID="ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5" exitCode=0 Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.157246 4885 generic.go:334] "Generic (PLEG): container finished" podID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerID="a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519" exitCode=0 Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.157255 4885 generic.go:334] "Generic (PLEG): container finished" podID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerID="5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b" exitCode=0 Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.157262 4885 generic.go:334] "Generic (PLEG): container finished" podID="147e5e96-db98-498f-b4a4-927d73cb5db5" 
containerID="b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24" exitCode=143 Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.157271 4885 generic.go:334] "Generic (PLEG): container finished" podID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerID="e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8" exitCode=143 Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.157291 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerDied","Data":"4c863c3754e027f805460ff2c446494df4b4ed59f4d80a94d2dd7d2a276fdeda"} Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.157350 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerDied","Data":"ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5"} Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.157362 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerDied","Data":"a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519"} Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.157372 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerDied","Data":"5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b"} Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.157384 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerDied","Data":"b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24"} Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.157394 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerDied","Data":"e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8"} Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.212107 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hwpvs_147e5e96-db98-498f-b4a4-927d73cb5db5/ovnkube-controller/3.log" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.212484 4885 scope.go:117] "RemoveContainer" containerID="5780df1fee00b2c2d8653b44046f36214d66f32ce87ebeceea15fc88e8302dc9" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.215323 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hwpvs_147e5e96-db98-498f-b4a4-927d73cb5db5/ovn-acl-logging/0.log" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.215953 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hwpvs_147e5e96-db98-498f-b4a4-927d73cb5db5/ovn-controller/0.log" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.216445 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.275883 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-sk9n7"] Jan 30 00:19:14 crc kubenswrapper[4885]: E0130 00:19:14.276754 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bd7617d-7acd-465b-add2-f703a4d3f8e4" containerName="collect-profiles" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.276787 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bd7617d-7acd-465b-add2-f703a4d3f8e4" containerName="collect-profiles" Jan 30 00:19:14 crc kubenswrapper[4885]: E0130 00:19:14.276799 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="kubecfg-setup" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.276804 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="kubecfg-setup" Jan 30 00:19:14 crc kubenswrapper[4885]: E0130 00:19:14.276810 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovnkube-controller" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.276817 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovnkube-controller" Jan 30 00:19:14 crc kubenswrapper[4885]: E0130 00:19:14.276830 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="nbdb" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.276836 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="nbdb" Jan 30 00:19:14 crc kubenswrapper[4885]: E0130 00:19:14.276846 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovnkube-controller" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.276852 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovnkube-controller" Jan 30 00:19:14 crc kubenswrapper[4885]: E0130 00:19:14.276860 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3f0f887-4427-41fa-a495-470f6a1da8ae" containerName="registry" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.276866 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3f0f887-4427-41fa-a495-470f6a1da8ae" containerName="registry" Jan 30 00:19:14 crc kubenswrapper[4885]: E0130 00:19:14.276873 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="kube-rbac-proxy-ovn-metrics" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.276879 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="kube-rbac-proxy-ovn-metrics" Jan 30 00:19:14 crc kubenswrapper[4885]: E0130 00:19:14.276891 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovnkube-controller" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.276899 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovnkube-controller" Jan 30 00:19:14 crc kubenswrapper[4885]: E0130 00:19:14.276906 4885 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovn-controller" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.276912 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovn-controller" Jan 30 00:19:14 crc kubenswrapper[4885]: E0130 00:19:14.276919 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovnkube-controller" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.276936 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovnkube-controller" Jan 30 00:19:14 crc kubenswrapper[4885]: E0130 00:19:14.276946 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovn-acl-logging" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.276952 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovn-acl-logging" Jan 30 00:19:14 crc kubenswrapper[4885]: E0130 00:19:14.276962 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="kube-rbac-proxy-node" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.276968 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="kube-rbac-proxy-node" Jan 30 00:19:14 crc kubenswrapper[4885]: E0130 00:19:14.276975 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="northd" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.276981 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="northd" Jan 30 00:19:14 crc kubenswrapper[4885]: E0130 00:19:14.276989 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="sbdb" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.276995 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="sbdb" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.277086 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="kube-rbac-proxy-ovn-metrics" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.277100 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovnkube-controller" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.277107 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="kube-rbac-proxy-node" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.277113 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovnkube-controller" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.277119 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3f0f887-4427-41fa-a495-470f6a1da8ae" containerName="registry" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.277128 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovn-controller" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.277136 4885 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovnkube-controller" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.277141 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovnkube-controller" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.277149 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="sbdb" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.277156 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bd7617d-7acd-465b-add2-f703a4d3f8e4" containerName="collect-profiles" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.277166 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovn-acl-logging" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.277173 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="nbdb" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.277181 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="northd" Jan 30 00:19:14 crc kubenswrapper[4885]: E0130 00:19:14.277285 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovnkube-controller" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.277295 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovnkube-controller" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.277403 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerName="ovnkube-controller" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.282155 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.376689 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/147e5e96-db98-498f-b4a4-927d73cb5db5-ovnkube-script-lib\") pod \"147e5e96-db98-498f-b4a4-927d73cb5db5\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.376756 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-etc-openvswitch\") pod \"147e5e96-db98-498f-b4a4-927d73cb5db5\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.376846 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "147e5e96-db98-498f-b4a4-927d73cb5db5" (UID: "147e5e96-db98-498f-b4a4-927d73cb5db5"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.376979 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-kubelet\") pod \"147e5e96-db98-498f-b4a4-927d73cb5db5\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377013 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-run-netns\") pod \"147e5e96-db98-498f-b4a4-927d73cb5db5\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377055 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "147e5e96-db98-498f-b4a4-927d73cb5db5" (UID: "147e5e96-db98-498f-b4a4-927d73cb5db5"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377103 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "147e5e96-db98-498f-b4a4-927d73cb5db5" (UID: "147e5e96-db98-498f-b4a4-927d73cb5db5"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377158 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/147e5e96-db98-498f-b4a4-927d73cb5db5-ovn-node-metrics-cert\") pod \"147e5e96-db98-498f-b4a4-927d73cb5db5\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377192 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/147e5e96-db98-498f-b4a4-927d73cb5db5-env-overrides\") pod \"147e5e96-db98-498f-b4a4-927d73cb5db5\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377215 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-cni-bin\") pod \"147e5e96-db98-498f-b4a4-927d73cb5db5\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377242 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-slash\") pod \"147e5e96-db98-498f-b4a4-927d73cb5db5\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377257 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-cni-netd\") pod \"147e5e96-db98-498f-b4a4-927d73cb5db5\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377274 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"log-socket\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-log-socket\") pod \"147e5e96-db98-498f-b4a4-927d73cb5db5\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377289 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-run-openvswitch\") pod \"147e5e96-db98-498f-b4a4-927d73cb5db5\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377291 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "147e5e96-db98-498f-b4a4-927d73cb5db5" (UID: "147e5e96-db98-498f-b4a4-927d73cb5db5"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377294 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/147e5e96-db98-498f-b4a4-927d73cb5db5-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "147e5e96-db98-498f-b4a4-927d73cb5db5" (UID: "147e5e96-db98-498f-b4a4-927d73cb5db5"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377311 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-run-ovn\") pod \"147e5e96-db98-498f-b4a4-927d73cb5db5\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377334 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "147e5e96-db98-498f-b4a4-927d73cb5db5" (UID: "147e5e96-db98-498f-b4a4-927d73cb5db5"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377360 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-slash" (OuterVolumeSpecName: "host-slash") pod "147e5e96-db98-498f-b4a4-927d73cb5db5" (UID: "147e5e96-db98-498f-b4a4-927d73cb5db5"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377388 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"147e5e96-db98-498f-b4a4-927d73cb5db5\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377419 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/147e5e96-db98-498f-b4a4-927d73cb5db5-ovnkube-config\") pod \"147e5e96-db98-498f-b4a4-927d73cb5db5\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377415 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "147e5e96-db98-498f-b4a4-927d73cb5db5" (UID: "147e5e96-db98-498f-b4a4-927d73cb5db5"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377440 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-var-lib-openvswitch\") pod \"147e5e96-db98-498f-b4a4-927d73cb5db5\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377466 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-run-ovn-kubernetes\") pod \"147e5e96-db98-498f-b4a4-927d73cb5db5\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377459 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-log-socket" (OuterVolumeSpecName: "log-socket") pod "147e5e96-db98-498f-b4a4-927d73cb5db5" (UID: "147e5e96-db98-498f-b4a4-927d73cb5db5"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377514 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "147e5e96-db98-498f-b4a4-927d73cb5db5" (UID: "147e5e96-db98-498f-b4a4-927d73cb5db5"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377448 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "147e5e96-db98-498f-b4a4-927d73cb5db5" (UID: "147e5e96-db98-498f-b4a4-927d73cb5db5"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377490 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dhwkm\" (UniqueName: \"kubernetes.io/projected/147e5e96-db98-498f-b4a4-927d73cb5db5-kube-api-access-dhwkm\") pod \"147e5e96-db98-498f-b4a4-927d73cb5db5\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377613 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-systemd-units\") pod \"147e5e96-db98-498f-b4a4-927d73cb5db5\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377540 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "147e5e96-db98-498f-b4a4-927d73cb5db5" (UID: "147e5e96-db98-498f-b4a4-927d73cb5db5"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377553 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "147e5e96-db98-498f-b4a4-927d73cb5db5" (UID: "147e5e96-db98-498f-b4a4-927d73cb5db5"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377623 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/147e5e96-db98-498f-b4a4-927d73cb5db5-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "147e5e96-db98-498f-b4a4-927d73cb5db5" (UID: "147e5e96-db98-498f-b4a4-927d73cb5db5"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377660 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "147e5e96-db98-498f-b4a4-927d73cb5db5" (UID: "147e5e96-db98-498f-b4a4-927d73cb5db5"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377683 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-run-systemd\") pod \"147e5e96-db98-498f-b4a4-927d73cb5db5\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377724 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-node-log\") pod \"147e5e96-db98-498f-b4a4-927d73cb5db5\" (UID: \"147e5e96-db98-498f-b4a4-927d73cb5db5\") " Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377887 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-node-log" (OuterVolumeSpecName: "node-log") pod "147e5e96-db98-498f-b4a4-927d73cb5db5" (UID: "147e5e96-db98-498f-b4a4-927d73cb5db5"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.377981 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-host-run-netns\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.378051 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/147e5e96-db98-498f-b4a4-927d73cb5db5-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "147e5e96-db98-498f-b4a4-927d73cb5db5" (UID: "147e5e96-db98-498f-b4a4-927d73cb5db5"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.378090 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-host-run-ovn-kubernetes\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.378164 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-host-cni-netd\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.378212 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-etc-openvswitch\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.378295 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-ovn-node-metrics-cert\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.378392 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-var-lib-openvswitch\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.378458 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-ovnkube-config\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.378510 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-host-cni-bin\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.378577 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-run-openvswitch\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.378628 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-run-ovn\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.378682 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-systemd-units\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.378726 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-host-slash\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.378840 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-log-socket\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.378888 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-host-kubelet\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.378953 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-run-systemd\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.378985 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-ovnkube-script-lib\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.379021 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.379079 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twgzr\" (UniqueName: \"kubernetes.io/projected/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-kube-api-access-twgzr\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.379123 4885 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-env-overrides\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.379224 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-node-log\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.379346 4885 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.379377 4885 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/147e5e96-db98-498f-b4a4-927d73cb5db5-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.379398 4885 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.379418 4885 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.379437 4885 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-systemd-units\") on node \"crc\" DevicePath \"\"" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.379454 4885 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-node-log\") on node \"crc\" DevicePath \"\"" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.379471 4885 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/147e5e96-db98-498f-b4a4-927d73cb5db5-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.379488 4885 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.379505 4885 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-kubelet\") on node \"crc\" DevicePath \"\"" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.379523 4885 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-run-netns\") on node \"crc\" DevicePath \"\"" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.379541 4885 
reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/147e5e96-db98-498f-b4a4-927d73cb5db5-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.379558 4885 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-cni-bin\") on node \"crc\" DevicePath \"\"" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.379574 4885 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-slash\") on node \"crc\" DevicePath \"\"" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.379590 4885 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-host-cni-netd\") on node \"crc\" DevicePath \"\"" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.379606 4885 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-log-socket\") on node \"crc\" DevicePath \"\"" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.379622 4885 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-run-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.379639 4885 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.383176 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/147e5e96-db98-498f-b4a4-927d73cb5db5-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "147e5e96-db98-498f-b4a4-927d73cb5db5" (UID: "147e5e96-db98-498f-b4a4-927d73cb5db5"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.383634 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/147e5e96-db98-498f-b4a4-927d73cb5db5-kube-api-access-dhwkm" (OuterVolumeSpecName: "kube-api-access-dhwkm") pod "147e5e96-db98-498f-b4a4-927d73cb5db5" (UID: "147e5e96-db98-498f-b4a4-927d73cb5db5"). InnerVolumeSpecName "kube-api-access-dhwkm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.401412 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "147e5e96-db98-498f-b4a4-927d73cb5db5" (UID: "147e5e96-db98-498f-b4a4-927d73cb5db5"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.481846 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-systemd-units\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.482172 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-host-slash\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.482276 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-log-socket\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.482371 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-host-kubelet\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.482468 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-run-systemd\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.482562 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-ovnkube-script-lib\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.482659 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.482791 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twgzr\" (UniqueName: \"kubernetes.io/projected/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-kube-api-access-twgzr\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.483892 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-ovnkube-script-lib\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.482925 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-log-socket\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.481992 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-systemd-units\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.482993 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-host-slash\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.483036 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.483072 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-run-systemd\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.483987 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-env-overrides\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.482890 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-host-kubelet\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.484311 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-node-log\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.484348 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-node-log\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.484456 4885 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-host-run-netns\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.484514 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-host-run-ovn-kubernetes\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.484541 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-host-cni-netd\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.484565 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-env-overrides\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.484594 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-etc-openvswitch\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.484592 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-host-run-netns\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.484623 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-host-run-ovn-kubernetes\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.484642 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-host-cni-netd\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.484566 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-etc-openvswitch\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.484723 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-ovn-node-metrics-cert\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.484802 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-var-lib-openvswitch\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.484850 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-ovnkube-config\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.484938 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-host-cni-bin\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.485020 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-run-openvswitch\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.485060 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-run-ovn\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.485234 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dhwkm\" (UniqueName: \"kubernetes.io/projected/147e5e96-db98-498f-b4a4-927d73cb5db5-kube-api-access-dhwkm\") on node \"crc\" DevicePath \"\"" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.485298 4885 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/147e5e96-db98-498f-b4a4-927d73cb5db5-run-systemd\") on node \"crc\" DevicePath \"\"" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.485311 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-host-cni-bin\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.485324 4885 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/147e5e96-db98-498f-b4a4-927d73cb5db5-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.485338 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-var-lib-openvswitch\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.485352 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-run-openvswitch\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.485423 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-run-ovn\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.485872 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-ovnkube-config\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.489934 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-ovn-node-metrics-cert\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.510037 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twgzr\" (UniqueName: \"kubernetes.io/projected/e4187e3b-b01c-4d0a-81c0-4584ac7074b4-kube-api-access-twgzr\") pod \"ovnkube-node-sk9n7\" (UID: \"e4187e3b-b01c-4d0a-81c0-4584ac7074b4\") " pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:14 crc kubenswrapper[4885]: I0130 00:19:14.600464 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.169756 4885 generic.go:334] "Generic (PLEG): container finished" podID="e4187e3b-b01c-4d0a-81c0-4584ac7074b4" containerID="f60dd7efa97a466871210af53b6ddad027187ce39859f04aedbee3b1f2bb7134" exitCode=0 Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.169845 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" event={"ID":"e4187e3b-b01c-4d0a-81c0-4584ac7074b4","Type":"ContainerDied","Data":"f60dd7efa97a466871210af53b6ddad027187ce39859f04aedbee3b1f2bb7134"} Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.171693 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" event={"ID":"e4187e3b-b01c-4d0a-81c0-4584ac7074b4","Type":"ContainerStarted","Data":"262d1a1b5b84b6650ee2288345fe6ce9133beb7c4508ffceacefec0712e967f8"} Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.176702 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xmv9h_3f11e547-11fd-417a-be4a-e4f37d8e7839/kube-multus/2.log" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.185252 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hwpvs_147e5e96-db98-498f-b4a4-927d73cb5db5/ovn-acl-logging/0.log" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.186634 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-hwpvs_147e5e96-db98-498f-b4a4-927d73cb5db5/ovn-controller/0.log" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.187537 4885 generic.go:334] "Generic (PLEG): container finished" podID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerID="4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc" exitCode=0 Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.187630 4885 generic.go:334] "Generic (PLEG): container finished" podID="147e5e96-db98-498f-b4a4-927d73cb5db5" containerID="9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e" exitCode=0 Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.187684 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerDied","Data":"4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc"} Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.187727 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerDied","Data":"9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e"} Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.187751 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" event={"ID":"147e5e96-db98-498f-b4a4-927d73cb5db5","Type":"ContainerDied","Data":"f4f9113690925a02f48604f583a736200e31613e3fe941f9675bfdcdfaa4cad3"} Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.187803 4885 scope.go:117] "RemoveContainer" containerID="4c863c3754e027f805460ff2c446494df4b4ed59f4d80a94d2dd7d2a276fdeda" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.188071 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-hwpvs" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.226153 4885 scope.go:117] "RemoveContainer" containerID="4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.262172 4885 scope.go:117] "RemoveContainer" containerID="9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.285857 4885 scope.go:117] "RemoveContainer" containerID="ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.296949 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-hwpvs"] Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.301552 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-hwpvs"] Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.314004 4885 scope.go:117] "RemoveContainer" containerID="a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.333989 4885 scope.go:117] "RemoveContainer" containerID="5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.347745 4885 scope.go:117] "RemoveContainer" containerID="b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.360431 4885 scope.go:117] "RemoveContainer" containerID="e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.374225 4885 scope.go:117] "RemoveContainer" containerID="2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.399499 4885 scope.go:117] "RemoveContainer" containerID="4c863c3754e027f805460ff2c446494df4b4ed59f4d80a94d2dd7d2a276fdeda" Jan 30 00:19:15 crc kubenswrapper[4885]: E0130 00:19:15.400511 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c863c3754e027f805460ff2c446494df4b4ed59f4d80a94d2dd7d2a276fdeda\": container with ID starting with 4c863c3754e027f805460ff2c446494df4b4ed59f4d80a94d2dd7d2a276fdeda not found: ID does not exist" containerID="4c863c3754e027f805460ff2c446494df4b4ed59f4d80a94d2dd7d2a276fdeda" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.400549 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c863c3754e027f805460ff2c446494df4b4ed59f4d80a94d2dd7d2a276fdeda"} err="failed to get container status \"4c863c3754e027f805460ff2c446494df4b4ed59f4d80a94d2dd7d2a276fdeda\": rpc error: code = NotFound desc = could not find container \"4c863c3754e027f805460ff2c446494df4b4ed59f4d80a94d2dd7d2a276fdeda\": container with ID starting with 4c863c3754e027f805460ff2c446494df4b4ed59f4d80a94d2dd7d2a276fdeda not found: ID does not exist" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.400574 4885 scope.go:117] "RemoveContainer" containerID="4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc" Jan 30 00:19:15 crc kubenswrapper[4885]: E0130 00:19:15.400981 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\": container with ID starting with 
4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc not found: ID does not exist" containerID="4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.401007 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc"} err="failed to get container status \"4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\": rpc error: code = NotFound desc = could not find container \"4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\": container with ID starting with 4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc not found: ID does not exist" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.401023 4885 scope.go:117] "RemoveContainer" containerID="9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e" Jan 30 00:19:15 crc kubenswrapper[4885]: E0130 00:19:15.401340 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\": container with ID starting with 9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e not found: ID does not exist" containerID="9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.401393 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e"} err="failed to get container status \"9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\": rpc error: code = NotFound desc = could not find container \"9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\": container with ID starting with 9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e not found: ID does not exist" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.401430 4885 scope.go:117] "RemoveContainer" containerID="ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5" Jan 30 00:19:15 crc kubenswrapper[4885]: E0130 00:19:15.401691 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\": container with ID starting with ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5 not found: ID does not exist" containerID="ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.401727 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5"} err="failed to get container status \"ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\": rpc error: code = NotFound desc = could not find container \"ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\": container with ID starting with ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5 not found: ID does not exist" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.401742 4885 scope.go:117] "RemoveContainer" containerID="a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519" Jan 30 00:19:15 crc kubenswrapper[4885]: E0130 00:19:15.402259 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\": container with ID starting with a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519 not found: ID does not exist" containerID="a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.402310 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519"} err="failed to get container status \"a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\": rpc error: code = NotFound desc = could not find container \"a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\": container with ID starting with a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519 not found: ID does not exist" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.402342 4885 scope.go:117] "RemoveContainer" containerID="5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b" Jan 30 00:19:15 crc kubenswrapper[4885]: E0130 00:19:15.402639 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\": container with ID starting with 5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b not found: ID does not exist" containerID="5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.402664 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b"} err="failed to get container status \"5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\": rpc error: code = NotFound desc = could not find container \"5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\": container with ID starting with 5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b not found: ID does not exist" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.402677 4885 scope.go:117] "RemoveContainer" containerID="b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24" Jan 30 00:19:15 crc kubenswrapper[4885]: E0130 00:19:15.403032 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\": container with ID starting with b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24 not found: ID does not exist" containerID="b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.403054 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24"} err="failed to get container status \"b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\": rpc error: code = NotFound desc = could not find container \"b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\": container with ID starting with b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24 not found: ID does not exist" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.403067 4885 scope.go:117] "RemoveContainer" 
containerID="e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8" Jan 30 00:19:15 crc kubenswrapper[4885]: E0130 00:19:15.403259 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\": container with ID starting with e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8 not found: ID does not exist" containerID="e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.403275 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8"} err="failed to get container status \"e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\": rpc error: code = NotFound desc = could not find container \"e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\": container with ID starting with e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8 not found: ID does not exist" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.403288 4885 scope.go:117] "RemoveContainer" containerID="2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc" Jan 30 00:19:15 crc kubenswrapper[4885]: E0130 00:19:15.403544 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\": container with ID starting with 2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc not found: ID does not exist" containerID="2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.403562 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc"} err="failed to get container status \"2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\": rpc error: code = NotFound desc = could not find container \"2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\": container with ID starting with 2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc not found: ID does not exist" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.403654 4885 scope.go:117] "RemoveContainer" containerID="4c863c3754e027f805460ff2c446494df4b4ed59f4d80a94d2dd7d2a276fdeda" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.404220 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c863c3754e027f805460ff2c446494df4b4ed59f4d80a94d2dd7d2a276fdeda"} err="failed to get container status \"4c863c3754e027f805460ff2c446494df4b4ed59f4d80a94d2dd7d2a276fdeda\": rpc error: code = NotFound desc = could not find container \"4c863c3754e027f805460ff2c446494df4b4ed59f4d80a94d2dd7d2a276fdeda\": container with ID starting with 4c863c3754e027f805460ff2c446494df4b4ed59f4d80a94d2dd7d2a276fdeda not found: ID does not exist" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.404240 4885 scope.go:117] "RemoveContainer" containerID="4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.405132 4885 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc"} err="failed to get container status \"4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\": rpc error: code = NotFound desc = could not find container \"4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc\": container with ID starting with 4f3c931f2f96a62e1b933bd7be6d61214dc39ff4259356f608965657d4362bcc not found: ID does not exist" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.405180 4885 scope.go:117] "RemoveContainer" containerID="9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.405572 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e"} err="failed to get container status \"9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\": rpc error: code = NotFound desc = could not find container \"9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e\": container with ID starting with 9e776384c096e607f19dca3edd15b7942aec680e37d4a733d16a8dc41c20262e not found: ID does not exist" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.405622 4885 scope.go:117] "RemoveContainer" containerID="ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.406228 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5"} err="failed to get container status \"ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\": rpc error: code = NotFound desc = could not find container \"ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5\": container with ID starting with ca74125b202cce368a092cc1c165d3a2427bdca4c4ae2543c219eda9b437a6b5 not found: ID does not exist" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.406255 4885 scope.go:117] "RemoveContainer" containerID="a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.407223 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519"} err="failed to get container status \"a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\": rpc error: code = NotFound desc = could not find container \"a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519\": container with ID starting with a22b013e914e7c9caaff925bf086f8a18c7c384a7a6ccff4f9ab91767b309519 not found: ID does not exist" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.407249 4885 scope.go:117] "RemoveContainer" containerID="5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.407585 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b"} err="failed to get container status \"5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\": rpc error: code = NotFound desc = could not find container \"5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b\": container with ID starting with 5f8e57035e3a97d9598299c18ce94fa5d00318caec8188b4b211280fc22a249b not found: ID does not exist" Jan 
30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.407625 4885 scope.go:117] "RemoveContainer" containerID="b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.407913 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24"} err="failed to get container status \"b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\": rpc error: code = NotFound desc = could not find container \"b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24\": container with ID starting with b2ebc30d6a0c79b584a001b75fc20b41739c45aa29f91a402f25c72fbc746d24 not found: ID does not exist" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.407935 4885 scope.go:117] "RemoveContainer" containerID="e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.408193 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8"} err="failed to get container status \"e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\": rpc error: code = NotFound desc = could not find container \"e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8\": container with ID starting with e56f82d233bf87e82723cab856a3bb0c2dd706506e6f566ca3391498a0814dd8 not found: ID does not exist" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.408223 4885 scope.go:117] "RemoveContainer" containerID="2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc" Jan 30 00:19:15 crc kubenswrapper[4885]: I0130 00:19:15.408457 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc"} err="failed to get container status \"2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\": rpc error: code = NotFound desc = could not find container \"2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc\": container with ID starting with 2469ec7f7428d23e9e56b522748f09bc631a67391e744d360b0c0834a7ee73cc not found: ID does not exist" Jan 30 00:19:16 crc kubenswrapper[4885]: I0130 00:19:16.154305 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="147e5e96-db98-498f-b4a4-927d73cb5db5" path="/var/lib/kubelet/pods/147e5e96-db98-498f-b4a4-927d73cb5db5/volumes" Jan 30 00:19:16 crc kubenswrapper[4885]: I0130 00:19:16.198535 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" event={"ID":"e4187e3b-b01c-4d0a-81c0-4584ac7074b4","Type":"ContainerStarted","Data":"16bb171c2ebf21c62a084a473918248858c96c6cc4767a64385676b9614e71cd"} Jan 30 00:19:16 crc kubenswrapper[4885]: I0130 00:19:16.198579 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" event={"ID":"e4187e3b-b01c-4d0a-81c0-4584ac7074b4","Type":"ContainerStarted","Data":"4f61a9c0bf9da2adacbdd29890b16f1d484799adc0133a34109af614873436de"} Jan 30 00:19:16 crc kubenswrapper[4885]: I0130 00:19:16.198590 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" event={"ID":"e4187e3b-b01c-4d0a-81c0-4584ac7074b4","Type":"ContainerStarted","Data":"efce0b529029e7e9b171e6b8ddeee7ba7ce93aa5dc7416255144d7918187815a"} Jan 30 00:19:16 crc 
kubenswrapper[4885]: I0130 00:19:16.198600 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" event={"ID":"e4187e3b-b01c-4d0a-81c0-4584ac7074b4","Type":"ContainerStarted","Data":"6dc3bca305ae9ce77b273b38bd9ef43ec70f0483a921a8ce5abe01e85fa4b72d"} Jan 30 00:19:16 crc kubenswrapper[4885]: I0130 00:19:16.198610 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" event={"ID":"e4187e3b-b01c-4d0a-81c0-4584ac7074b4","Type":"ContainerStarted","Data":"bdc1a9930842ce2523d34148085831b7fcc8358f88473bb74471bc3de192aa92"} Jan 30 00:19:16 crc kubenswrapper[4885]: I0130 00:19:16.198621 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" event={"ID":"e4187e3b-b01c-4d0a-81c0-4584ac7074b4","Type":"ContainerStarted","Data":"4006c37279024c16125325a88ba9fb4aa4551c2f35cfa43150250115b9f4fc2b"} Jan 30 00:19:19 crc kubenswrapper[4885]: I0130 00:19:19.230752 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" event={"ID":"e4187e3b-b01c-4d0a-81c0-4584ac7074b4","Type":"ContainerStarted","Data":"d03be39a0389c3a29fe1cfc71c3decee39bfa471b8a929ef199d18a6cc3de01c"} Jan 30 00:19:21 crc kubenswrapper[4885]: I0130 00:19:21.247682 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" event={"ID":"e4187e3b-b01c-4d0a-81c0-4584ac7074b4","Type":"ContainerStarted","Data":"56af53ed1ba118068b3241adf0b36c5b0ab42ecd12932ac590b5e105621943aa"} Jan 30 00:19:21 crc kubenswrapper[4885]: I0130 00:19:21.248183 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:21 crc kubenswrapper[4885]: I0130 00:19:21.248236 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:21 crc kubenswrapper[4885]: I0130 00:19:21.248247 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:21 crc kubenswrapper[4885]: I0130 00:19:21.282564 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:21 crc kubenswrapper[4885]: I0130 00:19:21.283645 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:19:21 crc kubenswrapper[4885]: I0130 00:19:21.285735 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" podStartSLOduration=7.28571962 podStartE2EDuration="7.28571962s" podCreationTimestamp="2026-01-30 00:19:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:19:21.285387281 +0000 UTC m=+647.876859029" watchObservedRunningTime="2026-01-30 00:19:21.28571962 +0000 UTC m=+647.877191388" Jan 30 00:19:27 crc kubenswrapper[4885]: I0130 00:19:27.142329 4885 scope.go:117] "RemoveContainer" containerID="2681aea94aa236ce8fbf8e060c1ff8dd558f4a63c3b6a0382c7b9f70ffa15280" Jan 30 00:19:27 crc kubenswrapper[4885]: E0130 00:19:27.143992 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus 
pod=multus-xmv9h_openshift-multus(3f11e547-11fd-417a-be4a-e4f37d8e7839)\"" pod="openshift-multus/multus-xmv9h" podUID="3f11e547-11fd-417a-be4a-e4f37d8e7839" Jan 30 00:19:40 crc kubenswrapper[4885]: I0130 00:19:40.142292 4885 scope.go:117] "RemoveContainer" containerID="2681aea94aa236ce8fbf8e060c1ff8dd558f4a63c3b6a0382c7b9f70ffa15280" Jan 30 00:19:40 crc kubenswrapper[4885]: I0130 00:19:40.385404 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-xmv9h_3f11e547-11fd-417a-be4a-e4f37d8e7839/kube-multus/2.log" Jan 30 00:19:40 crc kubenswrapper[4885]: I0130 00:19:40.386091 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-xmv9h" event={"ID":"3f11e547-11fd-417a-be4a-e4f37d8e7839","Type":"ContainerStarted","Data":"e96d275a6b42fdc6f3421bed510a865b10bdfef7e69035e1c5eb757f393ec6be"} Jan 30 00:19:44 crc kubenswrapper[4885]: I0130 00:19:44.629398 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-sk9n7" Jan 30 00:20:22 crc kubenswrapper[4885]: I0130 00:20:22.537758 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p6vph"] Jan 30 00:20:22 crc kubenswrapper[4885]: I0130 00:20:22.541413 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-p6vph" podUID="ae480a27-919c-4a53-9a19-646d9af18fa9" containerName="registry-server" containerID="cri-o://807c13aa1a36df42a8664ae33dc645990e9c8c921b1863ba3a9e42995e3e0f8b" gracePeriod=30 Jan 30 00:20:22 crc kubenswrapper[4885]: I0130 00:20:22.916082 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p6vph" Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.009381 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae480a27-919c-4a53-9a19-646d9af18fa9-catalog-content\") pod \"ae480a27-919c-4a53-9a19-646d9af18fa9\" (UID: \"ae480a27-919c-4a53-9a19-646d9af18fa9\") " Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.009480 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae480a27-919c-4a53-9a19-646d9af18fa9-utilities\") pod \"ae480a27-919c-4a53-9a19-646d9af18fa9\" (UID: \"ae480a27-919c-4a53-9a19-646d9af18fa9\") " Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.009530 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfsxq\" (UniqueName: \"kubernetes.io/projected/ae480a27-919c-4a53-9a19-646d9af18fa9-kube-api-access-pfsxq\") pod \"ae480a27-919c-4a53-9a19-646d9af18fa9\" (UID: \"ae480a27-919c-4a53-9a19-646d9af18fa9\") " Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.011722 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae480a27-919c-4a53-9a19-646d9af18fa9-utilities" (OuterVolumeSpecName: "utilities") pod "ae480a27-919c-4a53-9a19-646d9af18fa9" (UID: "ae480a27-919c-4a53-9a19-646d9af18fa9"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.018338 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae480a27-919c-4a53-9a19-646d9af18fa9-kube-api-access-pfsxq" (OuterVolumeSpecName: "kube-api-access-pfsxq") pod "ae480a27-919c-4a53-9a19-646d9af18fa9" (UID: "ae480a27-919c-4a53-9a19-646d9af18fa9"). InnerVolumeSpecName "kube-api-access-pfsxq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.043096 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae480a27-919c-4a53-9a19-646d9af18fa9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ae480a27-919c-4a53-9a19-646d9af18fa9" (UID: "ae480a27-919c-4a53-9a19-646d9af18fa9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.111391 4885 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae480a27-919c-4a53-9a19-646d9af18fa9-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.111445 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfsxq\" (UniqueName: \"kubernetes.io/projected/ae480a27-919c-4a53-9a19-646d9af18fa9-kube-api-access-pfsxq\") on node \"crc\" DevicePath \"\"" Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.111464 4885 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae480a27-919c-4a53-9a19-646d9af18fa9-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.673681 4885 generic.go:334] "Generic (PLEG): container finished" podID="ae480a27-919c-4a53-9a19-646d9af18fa9" containerID="807c13aa1a36df42a8664ae33dc645990e9c8c921b1863ba3a9e42995e3e0f8b" exitCode=0 Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.673751 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p6vph" Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.673788 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p6vph" event={"ID":"ae480a27-919c-4a53-9a19-646d9af18fa9","Type":"ContainerDied","Data":"807c13aa1a36df42a8664ae33dc645990e9c8c921b1863ba3a9e42995e3e0f8b"} Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.675015 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p6vph" event={"ID":"ae480a27-919c-4a53-9a19-646d9af18fa9","Type":"ContainerDied","Data":"bae29355a90aba06ea8e6396e9fca74c352a7b027b3bb457b70d28058ba62a9b"} Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.675048 4885 scope.go:117] "RemoveContainer" containerID="807c13aa1a36df42a8664ae33dc645990e9c8c921b1863ba3a9e42995e3e0f8b" Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.692438 4885 scope.go:117] "RemoveContainer" containerID="a7eb1416aba71efac95147e7ae10099b47fe49c36c8943415d55f8a311e51054" Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.713142 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p6vph"] Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.718161 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-p6vph"] Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.725641 4885 scope.go:117] "RemoveContainer" containerID="07c4a90c074c0eb03395eebc65a62b9823f86544ef38865dcf508ac830acb736" Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.741669 4885 scope.go:117] "RemoveContainer" containerID="807c13aa1a36df42a8664ae33dc645990e9c8c921b1863ba3a9e42995e3e0f8b" Jan 30 00:20:23 crc kubenswrapper[4885]: E0130 00:20:23.742247 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"807c13aa1a36df42a8664ae33dc645990e9c8c921b1863ba3a9e42995e3e0f8b\": container with ID starting with 807c13aa1a36df42a8664ae33dc645990e9c8c921b1863ba3a9e42995e3e0f8b not found: ID does not exist" containerID="807c13aa1a36df42a8664ae33dc645990e9c8c921b1863ba3a9e42995e3e0f8b" Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.742284 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"807c13aa1a36df42a8664ae33dc645990e9c8c921b1863ba3a9e42995e3e0f8b"} err="failed to get container status \"807c13aa1a36df42a8664ae33dc645990e9c8c921b1863ba3a9e42995e3e0f8b\": rpc error: code = NotFound desc = could not find container \"807c13aa1a36df42a8664ae33dc645990e9c8c921b1863ba3a9e42995e3e0f8b\": container with ID starting with 807c13aa1a36df42a8664ae33dc645990e9c8c921b1863ba3a9e42995e3e0f8b not found: ID does not exist" Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.742311 4885 scope.go:117] "RemoveContainer" containerID="a7eb1416aba71efac95147e7ae10099b47fe49c36c8943415d55f8a311e51054" Jan 30 00:20:23 crc kubenswrapper[4885]: E0130 00:20:23.742736 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7eb1416aba71efac95147e7ae10099b47fe49c36c8943415d55f8a311e51054\": container with ID starting with a7eb1416aba71efac95147e7ae10099b47fe49c36c8943415d55f8a311e51054 not found: ID does not exist" containerID="a7eb1416aba71efac95147e7ae10099b47fe49c36c8943415d55f8a311e51054" Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.742813 4885 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7eb1416aba71efac95147e7ae10099b47fe49c36c8943415d55f8a311e51054"} err="failed to get container status \"a7eb1416aba71efac95147e7ae10099b47fe49c36c8943415d55f8a311e51054\": rpc error: code = NotFound desc = could not find container \"a7eb1416aba71efac95147e7ae10099b47fe49c36c8943415d55f8a311e51054\": container with ID starting with a7eb1416aba71efac95147e7ae10099b47fe49c36c8943415d55f8a311e51054 not found: ID does not exist" Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.742849 4885 scope.go:117] "RemoveContainer" containerID="07c4a90c074c0eb03395eebc65a62b9823f86544ef38865dcf508ac830acb736" Jan 30 00:20:23 crc kubenswrapper[4885]: E0130 00:20:23.743142 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07c4a90c074c0eb03395eebc65a62b9823f86544ef38865dcf508ac830acb736\": container with ID starting with 07c4a90c074c0eb03395eebc65a62b9823f86544ef38865dcf508ac830acb736 not found: ID does not exist" containerID="07c4a90c074c0eb03395eebc65a62b9823f86544ef38865dcf508ac830acb736" Jan 30 00:20:23 crc kubenswrapper[4885]: I0130 00:20:23.743168 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07c4a90c074c0eb03395eebc65a62b9823f86544ef38865dcf508ac830acb736"} err="failed to get container status \"07c4a90c074c0eb03395eebc65a62b9823f86544ef38865dcf508ac830acb736\": rpc error: code = NotFound desc = could not find container \"07c4a90c074c0eb03395eebc65a62b9823f86544ef38865dcf508ac830acb736\": container with ID starting with 07c4a90c074c0eb03395eebc65a62b9823f86544ef38865dcf508ac830acb736 not found: ID does not exist" Jan 30 00:20:24 crc kubenswrapper[4885]: I0130 00:20:24.148739 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae480a27-919c-4a53-9a19-646d9af18fa9" path="/var/lib/kubelet/pods/ae480a27-919c-4a53-9a19-646d9af18fa9/volumes" Jan 30 00:20:26 crc kubenswrapper[4885]: I0130 00:20:26.473958 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj"] Jan 30 00:20:26 crc kubenswrapper[4885]: E0130 00:20:26.474424 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae480a27-919c-4a53-9a19-646d9af18fa9" containerName="registry-server" Jan 30 00:20:26 crc kubenswrapper[4885]: I0130 00:20:26.474438 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae480a27-919c-4a53-9a19-646d9af18fa9" containerName="registry-server" Jan 30 00:20:26 crc kubenswrapper[4885]: E0130 00:20:26.474455 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae480a27-919c-4a53-9a19-646d9af18fa9" containerName="extract-content" Jan 30 00:20:26 crc kubenswrapper[4885]: I0130 00:20:26.474460 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae480a27-919c-4a53-9a19-646d9af18fa9" containerName="extract-content" Jan 30 00:20:26 crc kubenswrapper[4885]: E0130 00:20:26.474473 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae480a27-919c-4a53-9a19-646d9af18fa9" containerName="extract-utilities" Jan 30 00:20:26 crc kubenswrapper[4885]: I0130 00:20:26.474481 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae480a27-919c-4a53-9a19-646d9af18fa9" containerName="extract-utilities" Jan 30 00:20:26 crc kubenswrapper[4885]: I0130 00:20:26.474567 4885 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="ae480a27-919c-4a53-9a19-646d9af18fa9" containerName="registry-server" Jan 30 00:20:26 crc kubenswrapper[4885]: I0130 00:20:26.475235 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj" Jan 30 00:20:26 crc kubenswrapper[4885]: I0130 00:20:26.477125 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 30 00:20:26 crc kubenswrapper[4885]: I0130 00:20:26.485131 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj"] Jan 30 00:20:26 crc kubenswrapper[4885]: I0130 00:20:26.657746 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cac769f2-b794-4b61-95e1-045926cee254-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj\" (UID: \"cac769f2-b794-4b61-95e1-045926cee254\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj" Jan 30 00:20:26 crc kubenswrapper[4885]: I0130 00:20:26.657932 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cac769f2-b794-4b61-95e1-045926cee254-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj\" (UID: \"cac769f2-b794-4b61-95e1-045926cee254\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj" Jan 30 00:20:26 crc kubenswrapper[4885]: I0130 00:20:26.657992 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8r69l\" (UniqueName: \"kubernetes.io/projected/cac769f2-b794-4b61-95e1-045926cee254-kube-api-access-8r69l\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj\" (UID: \"cac769f2-b794-4b61-95e1-045926cee254\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj" Jan 30 00:20:26 crc kubenswrapper[4885]: I0130 00:20:26.759135 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cac769f2-b794-4b61-95e1-045926cee254-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj\" (UID: \"cac769f2-b794-4b61-95e1-045926cee254\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj" Jan 30 00:20:26 crc kubenswrapper[4885]: I0130 00:20:26.759234 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8r69l\" (UniqueName: \"kubernetes.io/projected/cac769f2-b794-4b61-95e1-045926cee254-kube-api-access-8r69l\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj\" (UID: \"cac769f2-b794-4b61-95e1-045926cee254\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj" Jan 30 00:20:26 crc kubenswrapper[4885]: I0130 00:20:26.759311 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cac769f2-b794-4b61-95e1-045926cee254-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj\" (UID: \"cac769f2-b794-4b61-95e1-045926cee254\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj" Jan 30 00:20:26 
crc kubenswrapper[4885]: I0130 00:20:26.760090 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cac769f2-b794-4b61-95e1-045926cee254-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj\" (UID: \"cac769f2-b794-4b61-95e1-045926cee254\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj" Jan 30 00:20:26 crc kubenswrapper[4885]: I0130 00:20:26.760110 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cac769f2-b794-4b61-95e1-045926cee254-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj\" (UID: \"cac769f2-b794-4b61-95e1-045926cee254\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj" Jan 30 00:20:26 crc kubenswrapper[4885]: I0130 00:20:26.792604 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8r69l\" (UniqueName: \"kubernetes.io/projected/cac769f2-b794-4b61-95e1-045926cee254-kube-api-access-8r69l\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj\" (UID: \"cac769f2-b794-4b61-95e1-045926cee254\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj" Jan 30 00:20:26 crc kubenswrapper[4885]: I0130 00:20:26.799529 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj" Jan 30 00:20:27 crc kubenswrapper[4885]: I0130 00:20:27.053271 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj"] Jan 30 00:20:27 crc kubenswrapper[4885]: I0130 00:20:27.700271 4885 generic.go:334] "Generic (PLEG): container finished" podID="cac769f2-b794-4b61-95e1-045926cee254" containerID="e7b448da491a98700fdbbcfb01d21936af1853caa39640d081441fe48b3852a6" exitCode=0 Jan 30 00:20:27 crc kubenswrapper[4885]: I0130 00:20:27.700376 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj" event={"ID":"cac769f2-b794-4b61-95e1-045926cee254","Type":"ContainerDied","Data":"e7b448da491a98700fdbbcfb01d21936af1853caa39640d081441fe48b3852a6"} Jan 30 00:20:27 crc kubenswrapper[4885]: I0130 00:20:27.700611 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj" event={"ID":"cac769f2-b794-4b61-95e1-045926cee254","Type":"ContainerStarted","Data":"96c7b240f48a0a4acd103bbb3039d565191bfc39baa7fb536912ce992fb7f345"} Jan 30 00:20:27 crc kubenswrapper[4885]: I0130 00:20:27.703439 4885 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 00:20:29 crc kubenswrapper[4885]: I0130 00:20:29.716255 4885 generic.go:334] "Generic (PLEG): container finished" podID="cac769f2-b794-4b61-95e1-045926cee254" containerID="af3d702a2715d5e28fe32976edfb44f4c5f962b2f7325be883a9037a69e046e1" exitCode=0 Jan 30 00:20:29 crc kubenswrapper[4885]: I0130 00:20:29.716384 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj" event={"ID":"cac769f2-b794-4b61-95e1-045926cee254","Type":"ContainerDied","Data":"af3d702a2715d5e28fe32976edfb44f4c5f962b2f7325be883a9037a69e046e1"} Jan 
30 00:20:30 crc kubenswrapper[4885]: I0130 00:20:30.143529 4885 patch_prober.go:28] interesting pod/machine-config-daemon-bmd5j container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 00:20:30 crc kubenswrapper[4885]: I0130 00:20:30.143588 4885 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 00:20:30 crc kubenswrapper[4885]: I0130 00:20:30.726457 4885 generic.go:334] "Generic (PLEG): container finished" podID="cac769f2-b794-4b61-95e1-045926cee254" containerID="65c7c3cb7849e417b6b9c39f6380487779353165b4e81094b66f4045b99e00c4" exitCode=0 Jan 30 00:20:30 crc kubenswrapper[4885]: I0130 00:20:30.726557 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj" event={"ID":"cac769f2-b794-4b61-95e1-045926cee254","Type":"ContainerDied","Data":"65c7c3cb7849e417b6b9c39f6380487779353165b4e81094b66f4045b99e00c4"} Jan 30 00:20:32 crc kubenswrapper[4885]: I0130 00:20:32.048401 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj" Jan 30 00:20:32 crc kubenswrapper[4885]: I0130 00:20:32.231287 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cac769f2-b794-4b61-95e1-045926cee254-bundle\") pod \"cac769f2-b794-4b61-95e1-045926cee254\" (UID: \"cac769f2-b794-4b61-95e1-045926cee254\") " Jan 30 00:20:32 crc kubenswrapper[4885]: I0130 00:20:32.231419 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8r69l\" (UniqueName: \"kubernetes.io/projected/cac769f2-b794-4b61-95e1-045926cee254-kube-api-access-8r69l\") pod \"cac769f2-b794-4b61-95e1-045926cee254\" (UID: \"cac769f2-b794-4b61-95e1-045926cee254\") " Jan 30 00:20:32 crc kubenswrapper[4885]: I0130 00:20:32.231504 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cac769f2-b794-4b61-95e1-045926cee254-util\") pod \"cac769f2-b794-4b61-95e1-045926cee254\" (UID: \"cac769f2-b794-4b61-95e1-045926cee254\") " Jan 30 00:20:32 crc kubenswrapper[4885]: I0130 00:20:32.235618 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cac769f2-b794-4b61-95e1-045926cee254-bundle" (OuterVolumeSpecName: "bundle") pod "cac769f2-b794-4b61-95e1-045926cee254" (UID: "cac769f2-b794-4b61-95e1-045926cee254"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:20:32 crc kubenswrapper[4885]: I0130 00:20:32.243861 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cac769f2-b794-4b61-95e1-045926cee254-kube-api-access-8r69l" (OuterVolumeSpecName: "kube-api-access-8r69l") pod "cac769f2-b794-4b61-95e1-045926cee254" (UID: "cac769f2-b794-4b61-95e1-045926cee254"). InnerVolumeSpecName "kube-api-access-8r69l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:20:32 crc kubenswrapper[4885]: I0130 00:20:32.271025 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cac769f2-b794-4b61-95e1-045926cee254-util" (OuterVolumeSpecName: "util") pod "cac769f2-b794-4b61-95e1-045926cee254" (UID: "cac769f2-b794-4b61-95e1-045926cee254"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:20:32 crc kubenswrapper[4885]: I0130 00:20:32.333950 4885 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cac769f2-b794-4b61-95e1-045926cee254-util\") on node \"crc\" DevicePath \"\"" Jan 30 00:20:32 crc kubenswrapper[4885]: I0130 00:20:32.333999 4885 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cac769f2-b794-4b61-95e1-045926cee254-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 00:20:32 crc kubenswrapper[4885]: I0130 00:20:32.334018 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8r69l\" (UniqueName: \"kubernetes.io/projected/cac769f2-b794-4b61-95e1-045926cee254-kube-api-access-8r69l\") on node \"crc\" DevicePath \"\"" Jan 30 00:20:32 crc kubenswrapper[4885]: I0130 00:20:32.742103 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj" event={"ID":"cac769f2-b794-4b61-95e1-045926cee254","Type":"ContainerDied","Data":"96c7b240f48a0a4acd103bbb3039d565191bfc39baa7fb536912ce992fb7f345"} Jan 30 00:20:32 crc kubenswrapper[4885]: I0130 00:20:32.742153 4885 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="96c7b240f48a0a4acd103bbb3039d565191bfc39baa7fb536912ce992fb7f345" Jan 30 00:20:32 crc kubenswrapper[4885]: I0130 00:20:32.742187 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj" Jan 30 00:20:33 crc kubenswrapper[4885]: I0130 00:20:33.109528 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq"] Jan 30 00:20:33 crc kubenswrapper[4885]: E0130 00:20:33.109805 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cac769f2-b794-4b61-95e1-045926cee254" containerName="util" Jan 30 00:20:33 crc kubenswrapper[4885]: I0130 00:20:33.109827 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="cac769f2-b794-4b61-95e1-045926cee254" containerName="util" Jan 30 00:20:33 crc kubenswrapper[4885]: E0130 00:20:33.109854 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cac769f2-b794-4b61-95e1-045926cee254" containerName="pull" Jan 30 00:20:33 crc kubenswrapper[4885]: I0130 00:20:33.109864 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="cac769f2-b794-4b61-95e1-045926cee254" containerName="pull" Jan 30 00:20:33 crc kubenswrapper[4885]: E0130 00:20:33.109890 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cac769f2-b794-4b61-95e1-045926cee254" containerName="extract" Jan 30 00:20:33 crc kubenswrapper[4885]: I0130 00:20:33.109901 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="cac769f2-b794-4b61-95e1-045926cee254" containerName="extract" Jan 30 00:20:33 crc kubenswrapper[4885]: I0130 00:20:33.110035 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="cac769f2-b794-4b61-95e1-045926cee254" containerName="extract" Jan 30 00:20:33 crc kubenswrapper[4885]: I0130 00:20:33.110917 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" Jan 30 00:20:33 crc kubenswrapper[4885]: I0130 00:20:33.113782 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 30 00:20:33 crc kubenswrapper[4885]: I0130 00:20:33.120691 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq"] Jan 30 00:20:33 crc kubenswrapper[4885]: I0130 00:20:33.244466 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/75b8ecd5-1943-4af8-82ad-cbe8578ad0cc-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq\" (UID: \"75b8ecd5-1943-4af8-82ad-cbe8578ad0cc\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" Jan 30 00:20:33 crc kubenswrapper[4885]: I0130 00:20:33.244543 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhnnm\" (UniqueName: \"kubernetes.io/projected/75b8ecd5-1943-4af8-82ad-cbe8578ad0cc-kube-api-access-zhnnm\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq\" (UID: \"75b8ecd5-1943-4af8-82ad-cbe8578ad0cc\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" Jan 30 00:20:33 crc kubenswrapper[4885]: I0130 00:20:33.244636 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/75b8ecd5-1943-4af8-82ad-cbe8578ad0cc-bundle\") pod 
\"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq\" (UID: \"75b8ecd5-1943-4af8-82ad-cbe8578ad0cc\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" Jan 30 00:20:33 crc kubenswrapper[4885]: I0130 00:20:33.345965 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/75b8ecd5-1943-4af8-82ad-cbe8578ad0cc-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq\" (UID: \"75b8ecd5-1943-4af8-82ad-cbe8578ad0cc\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" Jan 30 00:20:33 crc kubenswrapper[4885]: I0130 00:20:33.346087 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhnnm\" (UniqueName: \"kubernetes.io/projected/75b8ecd5-1943-4af8-82ad-cbe8578ad0cc-kube-api-access-zhnnm\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq\" (UID: \"75b8ecd5-1943-4af8-82ad-cbe8578ad0cc\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" Jan 30 00:20:33 crc kubenswrapper[4885]: I0130 00:20:33.346236 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/75b8ecd5-1943-4af8-82ad-cbe8578ad0cc-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq\" (UID: \"75b8ecd5-1943-4af8-82ad-cbe8578ad0cc\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" Jan 30 00:20:33 crc kubenswrapper[4885]: I0130 00:20:33.347322 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/75b8ecd5-1943-4af8-82ad-cbe8578ad0cc-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq\" (UID: \"75b8ecd5-1943-4af8-82ad-cbe8578ad0cc\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" Jan 30 00:20:33 crc kubenswrapper[4885]: I0130 00:20:33.347902 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/75b8ecd5-1943-4af8-82ad-cbe8578ad0cc-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq\" (UID: \"75b8ecd5-1943-4af8-82ad-cbe8578ad0cc\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" Jan 30 00:20:33 crc kubenswrapper[4885]: I0130 00:20:33.371219 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhnnm\" (UniqueName: \"kubernetes.io/projected/75b8ecd5-1943-4af8-82ad-cbe8578ad0cc-kube-api-access-zhnnm\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq\" (UID: \"75b8ecd5-1943-4af8-82ad-cbe8578ad0cc\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" Jan 30 00:20:33 crc kubenswrapper[4885]: I0130 00:20:33.429080 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" Jan 30 00:20:33 crc kubenswrapper[4885]: I0130 00:20:33.706642 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq"] Jan 30 00:20:33 crc kubenswrapper[4885]: W0130 00:20:33.713645 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod75b8ecd5_1943_4af8_82ad_cbe8578ad0cc.slice/crio-2a63d5453e8064f17fe376ba224e19623533c3aeda10f4bee4a8f1fb6b2c7001 WatchSource:0}: Error finding container 2a63d5453e8064f17fe376ba224e19623533c3aeda10f4bee4a8f1fb6b2c7001: Status 404 returned error can't find the container with id 2a63d5453e8064f17fe376ba224e19623533c3aeda10f4bee4a8f1fb6b2c7001 Jan 30 00:20:33 crc kubenswrapper[4885]: I0130 00:20:33.754415 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" event={"ID":"75b8ecd5-1943-4af8-82ad-cbe8578ad0cc","Type":"ContainerStarted","Data":"2a63d5453e8064f17fe376ba224e19623533c3aeda10f4bee4a8f1fb6b2c7001"} Jan 30 00:20:34 crc kubenswrapper[4885]: I0130 00:20:34.767951 4885 generic.go:334] "Generic (PLEG): container finished" podID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" containerID="b2ea79ea4d4b9606b75e49631f0a232778a4d91fc5c19db1e059a119d05f707e" exitCode=0 Jan 30 00:20:34 crc kubenswrapper[4885]: I0130 00:20:34.768039 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" event={"ID":"75b8ecd5-1943-4af8-82ad-cbe8578ad0cc","Type":"ContainerDied","Data":"b2ea79ea4d4b9606b75e49631f0a232778a4d91fc5c19db1e059a119d05f707e"} Jan 30 00:20:35 crc kubenswrapper[4885]: E0130 00:20:35.004278 4885 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb: pinging container registry registry.connect.redhat.com: Get \"https://registry.connect.redhat.com/v2/\": dial tcp: lookup registry.connect.redhat.com on 199.204.47.54:53: server misbehaving" image="registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb" Jan 30 00:20:35 crc kubenswrapper[4885]: E0130 00:20:35.004453 4885 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:pull,Image:registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb,Command:[/util/cpb /bundle],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:bundle,ReadOnly:false,MountPath:/bundle,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:util,ReadOnly:false,MountPath:/util,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zhnnm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq_openshift-marketplace(75b8ecd5-1943-4af8-82ad-cbe8578ad0cc): ErrImagePull: initializing source docker://registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb: pinging container registry registry.connect.redhat.com: Get \"https://registry.connect.redhat.com/v2/\": dial tcp: lookup registry.connect.redhat.com on 199.204.47.54:53: server misbehaving" logger="UnhandledError" Jan 30 00:20:35 crc kubenswrapper[4885]: E0130 00:20:35.005659 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ErrImagePull: \"initializing source docker://registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb: pinging container registry registry.connect.redhat.com: Get \\\"https://registry.connect.redhat.com/v2/\\\": dial tcp: lookup registry.connect.redhat.com on 199.204.47.54:53: server misbehaving\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" Jan 30 00:20:35 crc kubenswrapper[4885]: E0130 00:20:35.776284 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" Jan 30 00:20:38 crc kubenswrapper[4885]: I0130 00:20:38.918175 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r"] Jan 30 00:20:38 crc kubenswrapper[4885]: I0130 00:20:38.919643 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r" Jan 30 00:20:38 crc kubenswrapper[4885]: I0130 00:20:38.928363 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r"] Jan 30 00:20:39 crc kubenswrapper[4885]: I0130 00:20:39.018745 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6f2r\" (UniqueName: \"kubernetes.io/projected/b1069962-45cc-4659-a3c8-66f6f9a0de10-kube-api-access-l6f2r\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r\" (UID: \"b1069962-45cc-4659-a3c8-66f6f9a0de10\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r" Jan 30 00:20:39 crc kubenswrapper[4885]: I0130 00:20:39.018836 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b1069962-45cc-4659-a3c8-66f6f9a0de10-util\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r\" (UID: \"b1069962-45cc-4659-a3c8-66f6f9a0de10\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r" Jan 30 00:20:39 crc kubenswrapper[4885]: I0130 00:20:39.018875 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b1069962-45cc-4659-a3c8-66f6f9a0de10-bundle\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r\" (UID: \"b1069962-45cc-4659-a3c8-66f6f9a0de10\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r" Jan 30 00:20:39 crc kubenswrapper[4885]: I0130 00:20:39.120096 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b1069962-45cc-4659-a3c8-66f6f9a0de10-util\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r\" (UID: \"b1069962-45cc-4659-a3c8-66f6f9a0de10\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r" Jan 30 00:20:39 crc kubenswrapper[4885]: I0130 00:20:39.120174 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b1069962-45cc-4659-a3c8-66f6f9a0de10-bundle\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r\" (UID: \"b1069962-45cc-4659-a3c8-66f6f9a0de10\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r" Jan 30 00:20:39 crc kubenswrapper[4885]: I0130 00:20:39.120213 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6f2r\" (UniqueName: \"kubernetes.io/projected/b1069962-45cc-4659-a3c8-66f6f9a0de10-kube-api-access-l6f2r\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r\" (UID: \"b1069962-45cc-4659-a3c8-66f6f9a0de10\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r" Jan 30 00:20:39 crc kubenswrapper[4885]: I0130 00:20:39.121074 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b1069962-45cc-4659-a3c8-66f6f9a0de10-util\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r\" (UID: \"b1069962-45cc-4659-a3c8-66f6f9a0de10\") " 
pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r" Jan 30 00:20:39 crc kubenswrapper[4885]: I0130 00:20:39.121100 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b1069962-45cc-4659-a3c8-66f6f9a0de10-bundle\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r\" (UID: \"b1069962-45cc-4659-a3c8-66f6f9a0de10\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r" Jan 30 00:20:39 crc kubenswrapper[4885]: I0130 00:20:39.150315 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6f2r\" (UniqueName: \"kubernetes.io/projected/b1069962-45cc-4659-a3c8-66f6f9a0de10-kube-api-access-l6f2r\") pod \"925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r\" (UID: \"b1069962-45cc-4659-a3c8-66f6f9a0de10\") " pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r" Jan 30 00:20:39 crc kubenswrapper[4885]: I0130 00:20:39.233638 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r" Jan 30 00:20:39 crc kubenswrapper[4885]: I0130 00:20:39.499742 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r"] Jan 30 00:20:39 crc kubenswrapper[4885]: I0130 00:20:39.793814 4885 generic.go:334] "Generic (PLEG): container finished" podID="b1069962-45cc-4659-a3c8-66f6f9a0de10" containerID="0f1e04532930fe5f333c5ac7a22d16b7f0a053f62aa379eec67d706a82d966c7" exitCode=0 Jan 30 00:20:39 crc kubenswrapper[4885]: I0130 00:20:39.793974 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r" event={"ID":"b1069962-45cc-4659-a3c8-66f6f9a0de10","Type":"ContainerDied","Data":"0f1e04532930fe5f333c5ac7a22d16b7f0a053f62aa379eec67d706a82d966c7"} Jan 30 00:20:39 crc kubenswrapper[4885]: I0130 00:20:39.794133 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r" event={"ID":"b1069962-45cc-4659-a3c8-66f6f9a0de10","Type":"ContainerStarted","Data":"d7986aaed2f13e94f18c3ed3c742db63451ebaf45c4ea709da7cc2f01a599a88"} Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.216493 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-j4sbv"] Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.217557 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-j4sbv" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.220215 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.220304 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-fkp8k" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.220913 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.236338 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-j4sbv"] Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.252533 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-sdbc5"] Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.253281 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-sdbc5" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.254670 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-spwpr" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.255019 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.261753 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-pgxkz"] Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.262554 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-pgxkz" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.272070 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-sdbc5"] Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.280889 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-pgxkz"] Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.357025 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-mns7f"] Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.357876 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-mns7f" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.359624 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-vx5br" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.361446 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.375991 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-mns7f"] Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.376550 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5767ad85-13c2-45e6-9b9b-b029aa23d546-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-568c6c9c94-pgxkz\" (UID: \"5767ad85-13c2-45e6-9b9b-b029aa23d546\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-pgxkz" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.376606 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gd42\" (UniqueName: \"kubernetes.io/projected/242d3a0e-d0ae-4a31-b367-73fae8817ff4-kube-api-access-9gd42\") pod \"obo-prometheus-operator-68bc856cb9-j4sbv\" (UID: \"242d3a0e-d0ae-4a31-b367-73fae8817ff4\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-j4sbv" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.376636 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/26ab510c-a00c-491d-a540-30faf4147e68-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-568c6c9c94-sdbc5\" (UID: \"26ab510c-a00c-491d-a540-30faf4147e68\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-sdbc5" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.376667 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/26ab510c-a00c-491d-a540-30faf4147e68-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-568c6c9c94-sdbc5\" (UID: \"26ab510c-a00c-491d-a540-30faf4147e68\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-sdbc5" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.376689 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5767ad85-13c2-45e6-9b9b-b029aa23d546-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-568c6c9c94-pgxkz\" (UID: \"5767ad85-13c2-45e6-9b9b-b029aa23d546\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-pgxkz" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.477846 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gx289\" (UniqueName: \"kubernetes.io/projected/23d3c1dd-756d-43b9-a6b5-337a09e3be8f-kube-api-access-gx289\") pod \"observability-operator-59bdc8b94-mns7f\" (UID: \"23d3c1dd-756d-43b9-a6b5-337a09e3be8f\") " pod="openshift-operators/observability-operator-59bdc8b94-mns7f" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.477916 4885 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/26ab510c-a00c-491d-a540-30faf4147e68-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-568c6c9c94-sdbc5\" (UID: \"26ab510c-a00c-491d-a540-30faf4147e68\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-sdbc5" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.478044 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5767ad85-13c2-45e6-9b9b-b029aa23d546-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-568c6c9c94-pgxkz\" (UID: \"5767ad85-13c2-45e6-9b9b-b029aa23d546\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-pgxkz" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.478116 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/23d3c1dd-756d-43b9-a6b5-337a09e3be8f-observability-operator-tls\") pod \"observability-operator-59bdc8b94-mns7f\" (UID: \"23d3c1dd-756d-43b9-a6b5-337a09e3be8f\") " pod="openshift-operators/observability-operator-59bdc8b94-mns7f" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.478251 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5767ad85-13c2-45e6-9b9b-b029aa23d546-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-568c6c9c94-pgxkz\" (UID: \"5767ad85-13c2-45e6-9b9b-b029aa23d546\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-pgxkz" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.478314 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gd42\" (UniqueName: \"kubernetes.io/projected/242d3a0e-d0ae-4a31-b367-73fae8817ff4-kube-api-access-9gd42\") pod \"obo-prometheus-operator-68bc856cb9-j4sbv\" (UID: \"242d3a0e-d0ae-4a31-b367-73fae8817ff4\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-j4sbv" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.478370 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/26ab510c-a00c-491d-a540-30faf4147e68-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-568c6c9c94-sdbc5\" (UID: \"26ab510c-a00c-491d-a540-30faf4147e68\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-sdbc5" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.484389 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/26ab510c-a00c-491d-a540-30faf4147e68-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-568c6c9c94-sdbc5\" (UID: \"26ab510c-a00c-491d-a540-30faf4147e68\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-sdbc5" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.484907 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5767ad85-13c2-45e6-9b9b-b029aa23d546-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-568c6c9c94-pgxkz\" (UID: \"5767ad85-13c2-45e6-9b9b-b029aa23d546\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-pgxkz" Jan 30 00:20:43 crc 
kubenswrapper[4885]: I0130 00:20:43.491568 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/26ab510c-a00c-491d-a540-30faf4147e68-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-568c6c9c94-sdbc5\" (UID: \"26ab510c-a00c-491d-a540-30faf4147e68\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-sdbc5" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.493213 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5767ad85-13c2-45e6-9b9b-b029aa23d546-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-568c6c9c94-pgxkz\" (UID: \"5767ad85-13c2-45e6-9b9b-b029aa23d546\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-pgxkz" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.497328 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gd42\" (UniqueName: \"kubernetes.io/projected/242d3a0e-d0ae-4a31-b367-73fae8817ff4-kube-api-access-9gd42\") pod \"obo-prometheus-operator-68bc856cb9-j4sbv\" (UID: \"242d3a0e-d0ae-4a31-b367-73fae8817ff4\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-j4sbv" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.539167 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-j4sbv" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.568912 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-pw6h7"] Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.570849 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-pw6h7" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.573067 4885 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-gmfzq" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.576763 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-sdbc5" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.581907 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gx289\" (UniqueName: \"kubernetes.io/projected/23d3c1dd-756d-43b9-a6b5-337a09e3be8f-kube-api-access-gx289\") pod \"observability-operator-59bdc8b94-mns7f\" (UID: \"23d3c1dd-756d-43b9-a6b5-337a09e3be8f\") " pod="openshift-operators/observability-operator-59bdc8b94-mns7f" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.582034 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/23d3c1dd-756d-43b9-a6b5-337a09e3be8f-observability-operator-tls\") pod \"observability-operator-59bdc8b94-mns7f\" (UID: \"23d3c1dd-756d-43b9-a6b5-337a09e3be8f\") " pod="openshift-operators/observability-operator-59bdc8b94-mns7f" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.584902 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-pgxkz" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.586385 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/23d3c1dd-756d-43b9-a6b5-337a09e3be8f-observability-operator-tls\") pod \"observability-operator-59bdc8b94-mns7f\" (UID: \"23d3c1dd-756d-43b9-a6b5-337a09e3be8f\") " pod="openshift-operators/observability-operator-59bdc8b94-mns7f" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.588730 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-pw6h7"] Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.620480 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gx289\" (UniqueName: \"kubernetes.io/projected/23d3c1dd-756d-43b9-a6b5-337a09e3be8f-kube-api-access-gx289\") pod \"observability-operator-59bdc8b94-mns7f\" (UID: \"23d3c1dd-756d-43b9-a6b5-337a09e3be8f\") " pod="openshift-operators/observability-operator-59bdc8b94-mns7f" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.671135 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-mns7f" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.683483 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5sjj\" (UniqueName: \"kubernetes.io/projected/17d1d23a-713e-4a3f-94c9-6de7c19c9bd2-kube-api-access-c5sjj\") pod \"perses-operator-5bf474d74f-pw6h7\" (UID: \"17d1d23a-713e-4a3f-94c9-6de7c19c9bd2\") " pod="openshift-operators/perses-operator-5bf474d74f-pw6h7" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.683577 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/17d1d23a-713e-4a3f-94c9-6de7c19c9bd2-openshift-service-ca\") pod \"perses-operator-5bf474d74f-pw6h7\" (UID: \"17d1d23a-713e-4a3f-94c9-6de7c19c9bd2\") " pod="openshift-operators/perses-operator-5bf474d74f-pw6h7" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.785149 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5sjj\" (UniqueName: \"kubernetes.io/projected/17d1d23a-713e-4a3f-94c9-6de7c19c9bd2-kube-api-access-c5sjj\") pod \"perses-operator-5bf474d74f-pw6h7\" (UID: \"17d1d23a-713e-4a3f-94c9-6de7c19c9bd2\") " pod="openshift-operators/perses-operator-5bf474d74f-pw6h7" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.785242 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/17d1d23a-713e-4a3f-94c9-6de7c19c9bd2-openshift-service-ca\") pod \"perses-operator-5bf474d74f-pw6h7\" (UID: \"17d1d23a-713e-4a3f-94c9-6de7c19c9bd2\") " pod="openshift-operators/perses-operator-5bf474d74f-pw6h7" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.786156 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/17d1d23a-713e-4a3f-94c9-6de7c19c9bd2-openshift-service-ca\") pod \"perses-operator-5bf474d74f-pw6h7\" (UID: \"17d1d23a-713e-4a3f-94c9-6de7c19c9bd2\") " pod="openshift-operators/perses-operator-5bf474d74f-pw6h7" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.804354 4885 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5sjj\" (UniqueName: \"kubernetes.io/projected/17d1d23a-713e-4a3f-94c9-6de7c19c9bd2-kube-api-access-c5sjj\") pod \"perses-operator-5bf474d74f-pw6h7\" (UID: \"17d1d23a-713e-4a3f-94c9-6de7c19c9bd2\") " pod="openshift-operators/perses-operator-5bf474d74f-pw6h7" Jan 30 00:20:43 crc kubenswrapper[4885]: I0130 00:20:43.919075 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-pw6h7" Jan 30 00:20:44 crc kubenswrapper[4885]: I0130 00:20:44.426629 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-sdbc5"] Jan 30 00:20:44 crc kubenswrapper[4885]: W0130 00:20:44.434914 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod26ab510c_a00c_491d_a540_30faf4147e68.slice/crio-ad8cce350563a4ddc20b5c7dff267fad013c988454c3cd0f787b5129918e9b16 WatchSource:0}: Error finding container ad8cce350563a4ddc20b5c7dff267fad013c988454c3cd0f787b5129918e9b16: Status 404 returned error can't find the container with id ad8cce350563a4ddc20b5c7dff267fad013c988454c3cd0f787b5129918e9b16 Jan 30 00:20:44 crc kubenswrapper[4885]: I0130 00:20:44.471276 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-pgxkz"] Jan 30 00:20:44 crc kubenswrapper[4885]: W0130 00:20:44.482241 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5767ad85_13c2_45e6_9b9b_b029aa23d546.slice/crio-5b4f149295f117fd0d31005ef0fa1f5f6766eecf0adf5f64d5bc2917fb956216 WatchSource:0}: Error finding container 5b4f149295f117fd0d31005ef0fa1f5f6766eecf0adf5f64d5bc2917fb956216: Status 404 returned error can't find the container with id 5b4f149295f117fd0d31005ef0fa1f5f6766eecf0adf5f64d5bc2917fb956216 Jan 30 00:20:44 crc kubenswrapper[4885]: I0130 00:20:44.574687 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-pw6h7"] Jan 30 00:20:44 crc kubenswrapper[4885]: I0130 00:20:44.826324 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-sdbc5" event={"ID":"26ab510c-a00c-491d-a540-30faf4147e68","Type":"ContainerStarted","Data":"ad8cce350563a4ddc20b5c7dff267fad013c988454c3cd0f787b5129918e9b16"} Jan 30 00:20:44 crc kubenswrapper[4885]: I0130 00:20:44.828011 4885 generic.go:334] "Generic (PLEG): container finished" podID="b1069962-45cc-4659-a3c8-66f6f9a0de10" containerID="ee6fb12a177744bf2f264544f09d90e6f42d65f7e98ba8c31b042dc68a5bd5c0" exitCode=0 Jan 30 00:20:44 crc kubenswrapper[4885]: I0130 00:20:44.828064 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r" event={"ID":"b1069962-45cc-4659-a3c8-66f6f9a0de10","Type":"ContainerDied","Data":"ee6fb12a177744bf2f264544f09d90e6f42d65f7e98ba8c31b042dc68a5bd5c0"} Jan 30 00:20:44 crc kubenswrapper[4885]: I0130 00:20:44.829043 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-pgxkz" event={"ID":"5767ad85-13c2-45e6-9b9b-b029aa23d546","Type":"ContainerStarted","Data":"5b4f149295f117fd0d31005ef0fa1f5f6766eecf0adf5f64d5bc2917fb956216"} Jan 30 00:20:44 crc 
kubenswrapper[4885]: I0130 00:20:44.830208 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-pw6h7" event={"ID":"17d1d23a-713e-4a3f-94c9-6de7c19c9bd2","Type":"ContainerStarted","Data":"2b4564355b232fbc38722cf9d0111f9c75be7a1b6e1e68cdfc3392615e20c36f"} Jan 30 00:20:44 crc kubenswrapper[4885]: I0130 00:20:44.833616 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-mns7f"] Jan 30 00:20:44 crc kubenswrapper[4885]: W0130 00:20:44.847628 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod23d3c1dd_756d_43b9_a6b5_337a09e3be8f.slice/crio-c743db8411890a4bf976e1f20fc553eea73dceb317694313b786bf7099889970 WatchSource:0}: Error finding container c743db8411890a4bf976e1f20fc553eea73dceb317694313b786bf7099889970: Status 404 returned error can't find the container with id c743db8411890a4bf976e1f20fc553eea73dceb317694313b786bf7099889970 Jan 30 00:20:44 crc kubenswrapper[4885]: I0130 00:20:44.876760 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-j4sbv"] Jan 30 00:20:44 crc kubenswrapper[4885]: W0130 00:20:44.879166 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod242d3a0e_d0ae_4a31_b367_73fae8817ff4.slice/crio-f2e3bf7ee1ca7ec617d3ed3d803a93a7caaa5f5e163f0c44a4d854c977db72fc WatchSource:0}: Error finding container f2e3bf7ee1ca7ec617d3ed3d803a93a7caaa5f5e163f0c44a4d854c977db72fc: Status 404 returned error can't find the container with id f2e3bf7ee1ca7ec617d3ed3d803a93a7caaa5f5e163f0c44a4d854c977db72fc Jan 30 00:20:45 crc kubenswrapper[4885]: I0130 00:20:45.845889 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-mns7f" event={"ID":"23d3c1dd-756d-43b9-a6b5-337a09e3be8f","Type":"ContainerStarted","Data":"c743db8411890a4bf976e1f20fc553eea73dceb317694313b786bf7099889970"} Jan 30 00:20:45 crc kubenswrapper[4885]: I0130 00:20:45.862744 4885 generic.go:334] "Generic (PLEG): container finished" podID="b1069962-45cc-4659-a3c8-66f6f9a0de10" containerID="16205f8232047cab083052c16ecc2b397453692e0cee8fb2d5825ce87d5b3e35" exitCode=0 Jan 30 00:20:45 crc kubenswrapper[4885]: I0130 00:20:45.862868 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r" event={"ID":"b1069962-45cc-4659-a3c8-66f6f9a0de10","Type":"ContainerDied","Data":"16205f8232047cab083052c16ecc2b397453692e0cee8fb2d5825ce87d5b3e35"} Jan 30 00:20:45 crc kubenswrapper[4885]: I0130 00:20:45.869505 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-j4sbv" event={"ID":"242d3a0e-d0ae-4a31-b367-73fae8817ff4","Type":"ContainerStarted","Data":"f2e3bf7ee1ca7ec617d3ed3d803a93a7caaa5f5e163f0c44a4d854c977db72fc"} Jan 30 00:20:47 crc kubenswrapper[4885]: I0130 00:20:47.277186 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r" Jan 30 00:20:47 crc kubenswrapper[4885]: I0130 00:20:47.344639 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6f2r\" (UniqueName: \"kubernetes.io/projected/b1069962-45cc-4659-a3c8-66f6f9a0de10-kube-api-access-l6f2r\") pod \"b1069962-45cc-4659-a3c8-66f6f9a0de10\" (UID: \"b1069962-45cc-4659-a3c8-66f6f9a0de10\") " Jan 30 00:20:47 crc kubenswrapper[4885]: I0130 00:20:47.344823 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b1069962-45cc-4659-a3c8-66f6f9a0de10-util\") pod \"b1069962-45cc-4659-a3c8-66f6f9a0de10\" (UID: \"b1069962-45cc-4659-a3c8-66f6f9a0de10\") " Jan 30 00:20:47 crc kubenswrapper[4885]: I0130 00:20:47.344877 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b1069962-45cc-4659-a3c8-66f6f9a0de10-bundle\") pod \"b1069962-45cc-4659-a3c8-66f6f9a0de10\" (UID: \"b1069962-45cc-4659-a3c8-66f6f9a0de10\") " Jan 30 00:20:47 crc kubenswrapper[4885]: I0130 00:20:47.346150 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1069962-45cc-4659-a3c8-66f6f9a0de10-bundle" (OuterVolumeSpecName: "bundle") pod "b1069962-45cc-4659-a3c8-66f6f9a0de10" (UID: "b1069962-45cc-4659-a3c8-66f6f9a0de10"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:20:47 crc kubenswrapper[4885]: I0130 00:20:47.359262 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1069962-45cc-4659-a3c8-66f6f9a0de10-util" (OuterVolumeSpecName: "util") pod "b1069962-45cc-4659-a3c8-66f6f9a0de10" (UID: "b1069962-45cc-4659-a3c8-66f6f9a0de10"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:20:47 crc kubenswrapper[4885]: I0130 00:20:47.366267 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1069962-45cc-4659-a3c8-66f6f9a0de10-kube-api-access-l6f2r" (OuterVolumeSpecName: "kube-api-access-l6f2r") pod "b1069962-45cc-4659-a3c8-66f6f9a0de10" (UID: "b1069962-45cc-4659-a3c8-66f6f9a0de10"). InnerVolumeSpecName "kube-api-access-l6f2r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:20:47 crc kubenswrapper[4885]: I0130 00:20:47.445842 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6f2r\" (UniqueName: \"kubernetes.io/projected/b1069962-45cc-4659-a3c8-66f6f9a0de10-kube-api-access-l6f2r\") on node \"crc\" DevicePath \"\"" Jan 30 00:20:47 crc kubenswrapper[4885]: I0130 00:20:47.445883 4885 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b1069962-45cc-4659-a3c8-66f6f9a0de10-util\") on node \"crc\" DevicePath \"\"" Jan 30 00:20:47 crc kubenswrapper[4885]: I0130 00:20:47.445893 4885 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b1069962-45cc-4659-a3c8-66f6f9a0de10-bundle\") on node \"crc\" DevicePath \"\"" Jan 30 00:20:47 crc kubenswrapper[4885]: I0130 00:20:47.904142 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r" event={"ID":"b1069962-45cc-4659-a3c8-66f6f9a0de10","Type":"ContainerDied","Data":"d7986aaed2f13e94f18c3ed3c742db63451ebaf45c4ea709da7cc2f01a599a88"} Jan 30 00:20:47 crc kubenswrapper[4885]: I0130 00:20:47.904188 4885 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d7986aaed2f13e94f18c3ed3c742db63451ebaf45c4ea709da7cc2f01a599a88" Jan 30 00:20:47 crc kubenswrapper[4885]: I0130 00:20:47.904262 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r" Jan 30 00:20:49 crc kubenswrapper[4885]: E0130 00:20:49.386008 4885 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb: pinging container registry registry.connect.redhat.com: Get \"https://registry.connect.redhat.com/v2/\": dial tcp: lookup registry.connect.redhat.com on 199.204.47.54:53: server misbehaving" image="registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb" Jan 30 00:20:49 crc kubenswrapper[4885]: E0130 00:20:49.386486 4885 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:pull,Image:registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb,Command:[/util/cpb /bundle],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:bundle,ReadOnly:false,MountPath:/bundle,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:util,ReadOnly:false,MountPath:/util,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zhnnm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq_openshift-marketplace(75b8ecd5-1943-4af8-82ad-cbe8578ad0cc): ErrImagePull: initializing source docker://registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb: pinging container registry registry.connect.redhat.com: Get \"https://registry.connect.redhat.com/v2/\": dial tcp: lookup registry.connect.redhat.com on 199.204.47.54:53: server misbehaving" logger="UnhandledError" Jan 30 00:20:49 crc kubenswrapper[4885]: E0130 00:20:49.387904 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ErrImagePull: \"initializing source docker://registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb: pinging container registry registry.connect.redhat.com: Get \\\"https://registry.connect.redhat.com/v2/\\\": dial tcp: lookup registry.connect.redhat.com on 199.204.47.54:53: server misbehaving\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" Jan 30 00:20:54 crc kubenswrapper[4885]: I0130 00:20:54.212450 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5586865c96-l59wt"] Jan 30 00:20:54 crc kubenswrapper[4885]: E0130 00:20:54.213475 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1069962-45cc-4659-a3c8-66f6f9a0de10" containerName="extract" Jan 30 00:20:54 crc kubenswrapper[4885]: I0130 00:20:54.213543 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1069962-45cc-4659-a3c8-66f6f9a0de10" containerName="extract" Jan 30 00:20:54 crc kubenswrapper[4885]: E0130 00:20:54.213601 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1069962-45cc-4659-a3c8-66f6f9a0de10" containerName="pull" Jan 30 00:20:54 crc kubenswrapper[4885]: I0130 00:20:54.213653 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1069962-45cc-4659-a3c8-66f6f9a0de10" containerName="pull" Jan 30 00:20:54 crc kubenswrapper[4885]: E0130 00:20:54.213714 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1069962-45cc-4659-a3c8-66f6f9a0de10" containerName="util" Jan 30 00:20:54 crc kubenswrapper[4885]: I0130 00:20:54.213790 4885 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="b1069962-45cc-4659-a3c8-66f6f9a0de10" containerName="util" Jan 30 00:20:54 crc kubenswrapper[4885]: I0130 00:20:54.213966 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1069962-45cc-4659-a3c8-66f6f9a0de10" containerName="extract" Jan 30 00:20:54 crc kubenswrapper[4885]: I0130 00:20:54.214438 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-5586865c96-l59wt" Jan 30 00:20:54 crc kubenswrapper[4885]: I0130 00:20:54.217623 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Jan 30 00:20:54 crc kubenswrapper[4885]: I0130 00:20:54.217675 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Jan 30 00:20:54 crc kubenswrapper[4885]: I0130 00:20:54.218164 4885 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-vwc4b" Jan 30 00:20:54 crc kubenswrapper[4885]: I0130 00:20:54.226949 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5586865c96-l59wt"] Jan 30 00:20:54 crc kubenswrapper[4885]: I0130 00:20:54.334750 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/66a0e26b-5f39-46df-85f8-b5be15102a53-tmp\") pod \"cert-manager-operator-controller-manager-5586865c96-l59wt\" (UID: \"66a0e26b-5f39-46df-85f8-b5be15102a53\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5586865c96-l59wt" Jan 30 00:20:54 crc kubenswrapper[4885]: I0130 00:20:54.334999 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlslg\" (UniqueName: \"kubernetes.io/projected/66a0e26b-5f39-46df-85f8-b5be15102a53-kube-api-access-hlslg\") pod \"cert-manager-operator-controller-manager-5586865c96-l59wt\" (UID: \"66a0e26b-5f39-46df-85f8-b5be15102a53\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5586865c96-l59wt" Jan 30 00:20:54 crc kubenswrapper[4885]: I0130 00:20:54.436408 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlslg\" (UniqueName: \"kubernetes.io/projected/66a0e26b-5f39-46df-85f8-b5be15102a53-kube-api-access-hlslg\") pod \"cert-manager-operator-controller-manager-5586865c96-l59wt\" (UID: \"66a0e26b-5f39-46df-85f8-b5be15102a53\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5586865c96-l59wt" Jan 30 00:20:54 crc kubenswrapper[4885]: I0130 00:20:54.436605 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/66a0e26b-5f39-46df-85f8-b5be15102a53-tmp\") pod \"cert-manager-operator-controller-manager-5586865c96-l59wt\" (UID: \"66a0e26b-5f39-46df-85f8-b5be15102a53\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5586865c96-l59wt" Jan 30 00:20:54 crc kubenswrapper[4885]: I0130 00:20:54.437555 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/66a0e26b-5f39-46df-85f8-b5be15102a53-tmp\") pod \"cert-manager-operator-controller-manager-5586865c96-l59wt\" (UID: \"66a0e26b-5f39-46df-85f8-b5be15102a53\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-5586865c96-l59wt" Jan 30 00:20:54 crc kubenswrapper[4885]: I0130 00:20:54.469853 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlslg\" (UniqueName: \"kubernetes.io/projected/66a0e26b-5f39-46df-85f8-b5be15102a53-kube-api-access-hlslg\") pod \"cert-manager-operator-controller-manager-5586865c96-l59wt\" (UID: \"66a0e26b-5f39-46df-85f8-b5be15102a53\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5586865c96-l59wt" Jan 30 00:20:54 crc kubenswrapper[4885]: I0130 00:20:54.543909 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-5586865c96-l59wt" Jan 30 00:20:54 crc kubenswrapper[4885]: I0130 00:20:54.961075 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5586865c96-l59wt"] Jan 30 00:20:54 crc kubenswrapper[4885]: W0130 00:20:54.974813 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod66a0e26b_5f39_46df_85f8_b5be15102a53.slice/crio-8e2d3a3386feaaeb12be1acde219d17c20abb55888d1155888e88a3afdeedcd3 WatchSource:0}: Error finding container 8e2d3a3386feaaeb12be1acde219d17c20abb55888d1155888e88a3afdeedcd3: Status 404 returned error can't find the container with id 8e2d3a3386feaaeb12be1acde219d17c20abb55888d1155888e88a3afdeedcd3 Jan 30 00:20:54 crc kubenswrapper[4885]: I0130 00:20:54.980180 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-sdbc5" event={"ID":"26ab510c-a00c-491d-a540-30faf4147e68","Type":"ContainerStarted","Data":"e490ba5b5a34c4fc541edea2adf28e5b181e6efe20acc35b82b7b13ad2ee95f1"} Jan 30 00:20:54 crc kubenswrapper[4885]: I0130 00:20:54.990232 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-pw6h7" event={"ID":"17d1d23a-713e-4a3f-94c9-6de7c19c9bd2","Type":"ContainerStarted","Data":"5f1049d37614508d09d3ad82c2c0311989d06816a089249d54fa571901284e0a"} Jan 30 00:20:54 crc kubenswrapper[4885]: I0130 00:20:54.990389 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5bf474d74f-pw6h7" Jan 30 00:20:55 crc kubenswrapper[4885]: I0130 00:20:55.026335 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-sdbc5" podStartSLOduration=1.7347382470000001 podStartE2EDuration="12.026315002s" podCreationTimestamp="2026-01-30 00:20:43 +0000 UTC" firstStartedPulling="2026-01-30 00:20:44.439983785 +0000 UTC m=+731.031455533" lastFinishedPulling="2026-01-30 00:20:54.73156054 +0000 UTC m=+741.323032288" observedRunningTime="2026-01-30 00:20:55.002183749 +0000 UTC m=+741.593655497" watchObservedRunningTime="2026-01-30 00:20:55.026315002 +0000 UTC m=+741.617786750" Jan 30 00:20:55 crc kubenswrapper[4885]: I0130 00:20:55.996040 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-5586865c96-l59wt" event={"ID":"66a0e26b-5f39-46df-85f8-b5be15102a53","Type":"ContainerStarted","Data":"8e2d3a3386feaaeb12be1acde219d17c20abb55888d1155888e88a3afdeedcd3"} Jan 30 00:20:55 crc kubenswrapper[4885]: I0130 00:20:55.997339 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operators/observability-operator-59bdc8b94-mns7f" event={"ID":"23d3c1dd-756d-43b9-a6b5-337a09e3be8f","Type":"ContainerStarted","Data":"1390bb4830f8986abc2031c673ca103b255ea20c428b623cb493200b932696ee"} Jan 30 00:20:55 crc kubenswrapper[4885]: I0130 00:20:55.997535 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-59bdc8b94-mns7f" Jan 30 00:20:55 crc kubenswrapper[4885]: I0130 00:20:55.998603 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-pgxkz" event={"ID":"5767ad85-13c2-45e6-9b9b-b029aa23d546","Type":"ContainerStarted","Data":"df90b307edc93e7f807a21f4340c102a35b221e120094d2a7abe2227918aed1c"} Jan 30 00:20:56 crc kubenswrapper[4885]: I0130 00:20:56.000205 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-j4sbv" event={"ID":"242d3a0e-d0ae-4a31-b367-73fae8817ff4","Type":"ContainerStarted","Data":"bb807da23f1458a5a0356478778ee92c04bcd9965bbcfbea2ce46b882bce1f94"} Jan 30 00:20:56 crc kubenswrapper[4885]: I0130 00:20:56.019086 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5bf474d74f-pw6h7" podStartSLOduration=2.93184852 podStartE2EDuration="13.019069333s" podCreationTimestamp="2026-01-30 00:20:43 +0000 UTC" firstStartedPulling="2026-01-30 00:20:44.645109697 +0000 UTC m=+731.236581445" lastFinishedPulling="2026-01-30 00:20:54.73233051 +0000 UTC m=+741.323802258" observedRunningTime="2026-01-30 00:20:55.030148235 +0000 UTC m=+741.621619983" watchObservedRunningTime="2026-01-30 00:20:56.019069333 +0000 UTC m=+742.610541081" Jan 30 00:20:56 crc kubenswrapper[4885]: I0130 00:20:56.040323 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-59bdc8b94-mns7f" podStartSLOduration=3.136842557 podStartE2EDuration="13.040300419s" podCreationTimestamp="2026-01-30 00:20:43 +0000 UTC" firstStartedPulling="2026-01-30 00:20:44.849882729 +0000 UTC m=+731.441354477" lastFinishedPulling="2026-01-30 00:20:54.753340591 +0000 UTC m=+741.344812339" observedRunningTime="2026-01-30 00:20:56.021260892 +0000 UTC m=+742.612732650" watchObservedRunningTime="2026-01-30 00:20:56.040300419 +0000 UTC m=+742.631772167" Jan 30 00:20:56 crc kubenswrapper[4885]: I0130 00:20:56.048619 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-59bdc8b94-mns7f" Jan 30 00:20:56 crc kubenswrapper[4885]: I0130 00:20:56.074556 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-j4sbv" podStartSLOduration=3.217447558 podStartE2EDuration="13.074535873s" podCreationTimestamp="2026-01-30 00:20:43 +0000 UTC" firstStartedPulling="2026-01-30 00:20:44.883219528 +0000 UTC m=+731.474691276" lastFinishedPulling="2026-01-30 00:20:54.740307843 +0000 UTC m=+741.331779591" observedRunningTime="2026-01-30 00:20:56.043844554 +0000 UTC m=+742.635316312" watchObservedRunningTime="2026-01-30 00:20:56.074535873 +0000 UTC m=+742.666007621" Jan 30 00:20:56 crc kubenswrapper[4885]: I0130 00:20:56.101213 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-568c6c9c94-pgxkz" podStartSLOduration=2.842807954 podStartE2EDuration="13.101198404s" podCreationTimestamp="2026-01-30 
00:20:43 +0000 UTC" firstStartedPulling="2026-01-30 00:20:44.493603096 +0000 UTC m=+731.085074844" lastFinishedPulling="2026-01-30 00:20:54.751993546 +0000 UTC m=+741.343465294" observedRunningTime="2026-01-30 00:20:56.070430963 +0000 UTC m=+742.661902711" watchObservedRunningTime="2026-01-30 00:20:56.101198404 +0000 UTC m=+742.692670152" Jan 30 00:21:00 crc kubenswrapper[4885]: I0130 00:21:00.023102 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-5586865c96-l59wt" event={"ID":"66a0e26b-5f39-46df-85f8-b5be15102a53","Type":"ContainerStarted","Data":"9435efeea34334cf1b61f605b9fddf4271e27b431d950c6f4b130132337ff2f6"} Jan 30 00:21:00 crc kubenswrapper[4885]: I0130 00:21:00.046113 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-5586865c96-l59wt" podStartSLOduration=1.2323861489999999 podStartE2EDuration="6.046098609s" podCreationTimestamp="2026-01-30 00:20:54 +0000 UTC" firstStartedPulling="2026-01-30 00:20:54.980296415 +0000 UTC m=+741.571768163" lastFinishedPulling="2026-01-30 00:20:59.794008875 +0000 UTC m=+746.385480623" observedRunningTime="2026-01-30 00:21:00.045780461 +0000 UTC m=+746.637252219" watchObservedRunningTime="2026-01-30 00:21:00.046098609 +0000 UTC m=+746.637570347" Jan 30 00:21:00 crc kubenswrapper[4885]: I0130 00:21:00.143464 4885 patch_prober.go:28] interesting pod/machine-config-daemon-bmd5j container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 00:21:00 crc kubenswrapper[4885]: I0130 00:21:00.143522 4885 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 00:21:03 crc kubenswrapper[4885]: I0130 00:21:03.303052 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-6888856db4-n7q7k"] Jan 30 00:21:03 crc kubenswrapper[4885]: I0130 00:21:03.304854 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-6888856db4-n7q7k" Jan 30 00:21:03 crc kubenswrapper[4885]: I0130 00:21:03.310427 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-6888856db4-n7q7k"] Jan 30 00:21:03 crc kubenswrapper[4885]: I0130 00:21:03.314130 4885 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-rsbrk" Jan 30 00:21:03 crc kubenswrapper[4885]: I0130 00:21:03.314237 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Jan 30 00:21:03 crc kubenswrapper[4885]: I0130 00:21:03.314330 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Jan 30 00:21:03 crc kubenswrapper[4885]: I0130 00:21:03.359431 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrgh6\" (UniqueName: \"kubernetes.io/projected/50db7dbd-00e1-425a-ada5-f771c61e95b2-kube-api-access-qrgh6\") pod \"cert-manager-webhook-6888856db4-n7q7k\" (UID: \"50db7dbd-00e1-425a-ada5-f771c61e95b2\") " pod="cert-manager/cert-manager-webhook-6888856db4-n7q7k" Jan 30 00:21:03 crc kubenswrapper[4885]: I0130 00:21:03.359658 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/50db7dbd-00e1-425a-ada5-f771c61e95b2-bound-sa-token\") pod \"cert-manager-webhook-6888856db4-n7q7k\" (UID: \"50db7dbd-00e1-425a-ada5-f771c61e95b2\") " pod="cert-manager/cert-manager-webhook-6888856db4-n7q7k" Jan 30 00:21:03 crc kubenswrapper[4885]: I0130 00:21:03.461179 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrgh6\" (UniqueName: \"kubernetes.io/projected/50db7dbd-00e1-425a-ada5-f771c61e95b2-kube-api-access-qrgh6\") pod \"cert-manager-webhook-6888856db4-n7q7k\" (UID: \"50db7dbd-00e1-425a-ada5-f771c61e95b2\") " pod="cert-manager/cert-manager-webhook-6888856db4-n7q7k" Jan 30 00:21:03 crc kubenswrapper[4885]: I0130 00:21:03.461269 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/50db7dbd-00e1-425a-ada5-f771c61e95b2-bound-sa-token\") pod \"cert-manager-webhook-6888856db4-n7q7k\" (UID: \"50db7dbd-00e1-425a-ada5-f771c61e95b2\") " pod="cert-manager/cert-manager-webhook-6888856db4-n7q7k" Jan 30 00:21:03 crc kubenswrapper[4885]: I0130 00:21:03.478888 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/50db7dbd-00e1-425a-ada5-f771c61e95b2-bound-sa-token\") pod \"cert-manager-webhook-6888856db4-n7q7k\" (UID: \"50db7dbd-00e1-425a-ada5-f771c61e95b2\") " pod="cert-manager/cert-manager-webhook-6888856db4-n7q7k" Jan 30 00:21:03 crc kubenswrapper[4885]: I0130 00:21:03.479341 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrgh6\" (UniqueName: \"kubernetes.io/projected/50db7dbd-00e1-425a-ada5-f771c61e95b2-kube-api-access-qrgh6\") pod \"cert-manager-webhook-6888856db4-n7q7k\" (UID: \"50db7dbd-00e1-425a-ada5-f771c61e95b2\") " pod="cert-manager/cert-manager-webhook-6888856db4-n7q7k" Jan 30 00:21:03 crc kubenswrapper[4885]: I0130 00:21:03.618911 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-6888856db4-n7q7k" Jan 30 00:21:03 crc kubenswrapper[4885]: I0130 00:21:03.901028 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-6888856db4-n7q7k"] Jan 30 00:21:03 crc kubenswrapper[4885]: I0130 00:21:03.929029 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5bf474d74f-pw6h7" Jan 30 00:21:04 crc kubenswrapper[4885]: I0130 00:21:04.043892 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-6888856db4-n7q7k" event={"ID":"50db7dbd-00e1-425a-ada5-f771c61e95b2","Type":"ContainerStarted","Data":"727b801ff24d66a7efbb346b9a3b9c35c81c9ddf4108caf47e1c7602f2912833"} Jan 30 00:21:04 crc kubenswrapper[4885]: E0130 00:21:04.146484 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" Jan 30 00:21:06 crc kubenswrapper[4885]: I0130 00:21:06.073058 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-5545bd876-ns9n7"] Jan 30 00:21:06 crc kubenswrapper[4885]: I0130 00:21:06.075068 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-5545bd876-ns9n7" Jan 30 00:21:06 crc kubenswrapper[4885]: I0130 00:21:06.077378 4885 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-db6hc" Jan 30 00:21:06 crc kubenswrapper[4885]: I0130 00:21:06.096548 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-5545bd876-ns9n7"] Jan 30 00:21:06 crc kubenswrapper[4885]: I0130 00:21:06.199803 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a0519bfa-8fa8-4c8a-98f7-2ab2a17b7923-bound-sa-token\") pod \"cert-manager-cainjector-5545bd876-ns9n7\" (UID: \"a0519bfa-8fa8-4c8a-98f7-2ab2a17b7923\") " pod="cert-manager/cert-manager-cainjector-5545bd876-ns9n7" Jan 30 00:21:06 crc kubenswrapper[4885]: I0130 00:21:06.199927 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lblsk\" (UniqueName: \"kubernetes.io/projected/a0519bfa-8fa8-4c8a-98f7-2ab2a17b7923-kube-api-access-lblsk\") pod \"cert-manager-cainjector-5545bd876-ns9n7\" (UID: \"a0519bfa-8fa8-4c8a-98f7-2ab2a17b7923\") " pod="cert-manager/cert-manager-cainjector-5545bd876-ns9n7" Jan 30 00:21:06 crc kubenswrapper[4885]: I0130 00:21:06.301538 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a0519bfa-8fa8-4c8a-98f7-2ab2a17b7923-bound-sa-token\") pod \"cert-manager-cainjector-5545bd876-ns9n7\" (UID: \"a0519bfa-8fa8-4c8a-98f7-2ab2a17b7923\") " pod="cert-manager/cert-manager-cainjector-5545bd876-ns9n7" Jan 30 00:21:06 crc kubenswrapper[4885]: I0130 00:21:06.301619 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lblsk\" (UniqueName: 
\"kubernetes.io/projected/a0519bfa-8fa8-4c8a-98f7-2ab2a17b7923-kube-api-access-lblsk\") pod \"cert-manager-cainjector-5545bd876-ns9n7\" (UID: \"a0519bfa-8fa8-4c8a-98f7-2ab2a17b7923\") " pod="cert-manager/cert-manager-cainjector-5545bd876-ns9n7" Jan 30 00:21:06 crc kubenswrapper[4885]: I0130 00:21:06.321564 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lblsk\" (UniqueName: \"kubernetes.io/projected/a0519bfa-8fa8-4c8a-98f7-2ab2a17b7923-kube-api-access-lblsk\") pod \"cert-manager-cainjector-5545bd876-ns9n7\" (UID: \"a0519bfa-8fa8-4c8a-98f7-2ab2a17b7923\") " pod="cert-manager/cert-manager-cainjector-5545bd876-ns9n7" Jan 30 00:21:06 crc kubenswrapper[4885]: I0130 00:21:06.327316 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a0519bfa-8fa8-4c8a-98f7-2ab2a17b7923-bound-sa-token\") pod \"cert-manager-cainjector-5545bd876-ns9n7\" (UID: \"a0519bfa-8fa8-4c8a-98f7-2ab2a17b7923\") " pod="cert-manager/cert-manager-cainjector-5545bd876-ns9n7" Jan 30 00:21:06 crc kubenswrapper[4885]: I0130 00:21:06.392920 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-5545bd876-ns9n7" Jan 30 00:21:06 crc kubenswrapper[4885]: I0130 00:21:06.727358 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-5545bd876-ns9n7"] Jan 30 00:21:06 crc kubenswrapper[4885]: W0130 00:21:06.740442 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0519bfa_8fa8_4c8a_98f7_2ab2a17b7923.slice/crio-b871d72b0c0a8f40443ea8771e4f2439b8d5f1498664eb1b2d7159d141528d3d WatchSource:0}: Error finding container b871d72b0c0a8f40443ea8771e4f2439b8d5f1498664eb1b2d7159d141528d3d: Status 404 returned error can't find the container with id b871d72b0c0a8f40443ea8771e4f2439b8d5f1498664eb1b2d7159d141528d3d Jan 30 00:21:07 crc kubenswrapper[4885]: I0130 00:21:07.064677 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-5545bd876-ns9n7" event={"ID":"a0519bfa-8fa8-4c8a-98f7-2ab2a17b7923","Type":"ContainerStarted","Data":"b871d72b0c0a8f40443ea8771e4f2439b8d5f1498664eb1b2d7159d141528d3d"} Jan 30 00:21:09 crc kubenswrapper[4885]: I0130 00:21:09.716034 4885 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 30 00:21:12 crc kubenswrapper[4885]: I0130 00:21:12.101576 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-6888856db4-n7q7k" event={"ID":"50db7dbd-00e1-425a-ada5-f771c61e95b2","Type":"ContainerStarted","Data":"5c5419e6c0a6263d9564df1d597eb63e497a5f2096716f3506bbdc29822443f5"} Jan 30 00:21:12 crc kubenswrapper[4885]: I0130 00:21:12.102108 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-6888856db4-n7q7k" Jan 30 00:21:12 crc kubenswrapper[4885]: I0130 00:21:12.103930 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-5545bd876-ns9n7" event={"ID":"a0519bfa-8fa8-4c8a-98f7-2ab2a17b7923","Type":"ContainerStarted","Data":"db7e7603b2bdd3cfe5f172ae35600cc2e81f0ae7f8b5a4a59fc46ec00c4e54cd"} Jan 30 00:21:12 crc kubenswrapper[4885]: I0130 00:21:12.124103 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-6888856db4-n7q7k" 
podStartSLOduration=1.232751653 podStartE2EDuration="9.124085186s" podCreationTimestamp="2026-01-30 00:21:03 +0000 UTC" firstStartedPulling="2026-01-30 00:21:03.912880651 +0000 UTC m=+750.504352399" lastFinishedPulling="2026-01-30 00:21:11.804214184 +0000 UTC m=+758.395685932" observedRunningTime="2026-01-30 00:21:12.122301888 +0000 UTC m=+758.713773626" watchObservedRunningTime="2026-01-30 00:21:12.124085186 +0000 UTC m=+758.715556954" Jan 30 00:21:12 crc kubenswrapper[4885]: I0130 00:21:12.149743 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-5545bd876-ns9n7" podStartSLOduration=1.06911866 podStartE2EDuration="6.149723689s" podCreationTimestamp="2026-01-30 00:21:06 +0000 UTC" firstStartedPulling="2026-01-30 00:21:06.742910489 +0000 UTC m=+753.334382237" lastFinishedPulling="2026-01-30 00:21:11.823515518 +0000 UTC m=+758.414987266" observedRunningTime="2026-01-30 00:21:12.148734813 +0000 UTC m=+758.740206561" watchObservedRunningTime="2026-01-30 00:21:12.149723689 +0000 UTC m=+758.741195437" Jan 30 00:21:17 crc kubenswrapper[4885]: E0130 00:21:17.380872 4885 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb: pinging container registry registry.connect.redhat.com: Get \"https://registry.connect.redhat.com/v2/\": dial tcp: lookup registry.connect.redhat.com on 199.204.47.54:53: server misbehaving" image="registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb" Jan 30 00:21:17 crc kubenswrapper[4885]: E0130 00:21:17.381322 4885 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:pull,Image:registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb,Command:[/util/cpb /bundle],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:bundle,ReadOnly:false,MountPath:/bundle,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:util,ReadOnly:false,MountPath:/util,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zhnnm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq_openshift-marketplace(75b8ecd5-1943-4af8-82ad-cbe8578ad0cc): ErrImagePull: initializing source 
docker://registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb: pinging container registry registry.connect.redhat.com: Get \"https://registry.connect.redhat.com/v2/\": dial tcp: lookup registry.connect.redhat.com on 199.204.47.54:53: server misbehaving" logger="UnhandledError" Jan 30 00:21:17 crc kubenswrapper[4885]: E0130 00:21:17.382519 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ErrImagePull: \"initializing source docker://registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb: pinging container registry registry.connect.redhat.com: Get \\\"https://registry.connect.redhat.com/v2/\\\": dial tcp: lookup registry.connect.redhat.com on 199.204.47.54:53: server misbehaving\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" Jan 30 00:21:18 crc kubenswrapper[4885]: I0130 00:21:18.621824 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-6888856db4-n7q7k" Jan 30 00:21:22 crc kubenswrapper[4885]: I0130 00:21:22.502989 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-545d4d4674-nsmps"] Jan 30 00:21:22 crc kubenswrapper[4885]: I0130 00:21:22.503895 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-545d4d4674-nsmps" Jan 30 00:21:22 crc kubenswrapper[4885]: I0130 00:21:22.507342 4885 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-jstcw" Jan 30 00:21:22 crc kubenswrapper[4885]: I0130 00:21:22.520267 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-545d4d4674-nsmps"] Jan 30 00:21:22 crc kubenswrapper[4885]: I0130 00:21:22.635741 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/baf670af-e33a-499c-af65-5d9df86ea0af-bound-sa-token\") pod \"cert-manager-545d4d4674-nsmps\" (UID: \"baf670af-e33a-499c-af65-5d9df86ea0af\") " pod="cert-manager/cert-manager-545d4d4674-nsmps" Jan 30 00:21:22 crc kubenswrapper[4885]: I0130 00:21:22.635852 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hx2rj\" (UniqueName: \"kubernetes.io/projected/baf670af-e33a-499c-af65-5d9df86ea0af-kube-api-access-hx2rj\") pod \"cert-manager-545d4d4674-nsmps\" (UID: \"baf670af-e33a-499c-af65-5d9df86ea0af\") " pod="cert-manager/cert-manager-545d4d4674-nsmps" Jan 30 00:21:22 crc kubenswrapper[4885]: I0130 00:21:22.738275 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/baf670af-e33a-499c-af65-5d9df86ea0af-bound-sa-token\") pod \"cert-manager-545d4d4674-nsmps\" (UID: \"baf670af-e33a-499c-af65-5d9df86ea0af\") " pod="cert-manager/cert-manager-545d4d4674-nsmps" Jan 30 00:21:22 crc kubenswrapper[4885]: I0130 00:21:22.738366 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hx2rj\" (UniqueName: \"kubernetes.io/projected/baf670af-e33a-499c-af65-5d9df86ea0af-kube-api-access-hx2rj\") pod \"cert-manager-545d4d4674-nsmps\" (UID: \"baf670af-e33a-499c-af65-5d9df86ea0af\") " pod="cert-manager/cert-manager-545d4d4674-nsmps" Jan 30 00:21:22 crc 
kubenswrapper[4885]: I0130 00:21:22.769739 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hx2rj\" (UniqueName: \"kubernetes.io/projected/baf670af-e33a-499c-af65-5d9df86ea0af-kube-api-access-hx2rj\") pod \"cert-manager-545d4d4674-nsmps\" (UID: \"baf670af-e33a-499c-af65-5d9df86ea0af\") " pod="cert-manager/cert-manager-545d4d4674-nsmps" Jan 30 00:21:22 crc kubenswrapper[4885]: I0130 00:21:22.778452 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/baf670af-e33a-499c-af65-5d9df86ea0af-bound-sa-token\") pod \"cert-manager-545d4d4674-nsmps\" (UID: \"baf670af-e33a-499c-af65-5d9df86ea0af\") " pod="cert-manager/cert-manager-545d4d4674-nsmps" Jan 30 00:21:22 crc kubenswrapper[4885]: I0130 00:21:22.833615 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-545d4d4674-nsmps" Jan 30 00:21:23 crc kubenswrapper[4885]: I0130 00:21:23.250563 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-545d4d4674-nsmps"] Jan 30 00:21:24 crc kubenswrapper[4885]: I0130 00:21:24.185839 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-545d4d4674-nsmps" event={"ID":"baf670af-e33a-499c-af65-5d9df86ea0af","Type":"ContainerStarted","Data":"f4081a8c1ec2b960aa06682dfade3c4eb9804db42674c7269a1849e704c44ce1"} Jan 30 00:21:24 crc kubenswrapper[4885]: I0130 00:21:24.186179 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-545d4d4674-nsmps" event={"ID":"baf670af-e33a-499c-af65-5d9df86ea0af","Type":"ContainerStarted","Data":"7d34482cd37e280d2fc8e70ed10ced714fa0a0f7af13028a4449a7b05de108a8"} Jan 30 00:21:24 crc kubenswrapper[4885]: I0130 00:21:24.212332 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-545d4d4674-nsmps" podStartSLOduration=2.212301974 podStartE2EDuration="2.212301974s" podCreationTimestamp="2026-01-30 00:21:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-30 00:21:24.201697811 +0000 UTC m=+770.793169559" watchObservedRunningTime="2026-01-30 00:21:24.212301974 +0000 UTC m=+770.803773732" Jan 30 00:21:30 crc kubenswrapper[4885]: I0130 00:21:30.144419 4885 patch_prober.go:28] interesting pod/machine-config-daemon-bmd5j container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 00:21:30 crc kubenswrapper[4885]: I0130 00:21:30.144552 4885 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 00:21:30 crc kubenswrapper[4885]: E0130 00:21:30.144585 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" 
Jan 30 00:21:30 crc kubenswrapper[4885]: I0130 00:21:30.160719 4885 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j"
Jan 30 00:21:30 crc kubenswrapper[4885]: I0130 00:21:30.161494 4885 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"734caa87bbc7a31bd529920dc9d2ea498fed57fb22424523a2081de46284edd7"} pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 30 00:21:30 crc kubenswrapper[4885]: I0130 00:21:30.161604 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" containerID="cri-o://734caa87bbc7a31bd529920dc9d2ea498fed57fb22424523a2081de46284edd7" gracePeriod=600
Jan 30 00:21:31 crc kubenswrapper[4885]: I0130 00:21:31.237565 4885 generic.go:334] "Generic (PLEG): container finished" podID="41b99e9c-eadb-404c-9596-1b102ac85157" containerID="734caa87bbc7a31bd529920dc9d2ea498fed57fb22424523a2081de46284edd7" exitCode=0
Jan 30 00:21:31 crc kubenswrapper[4885]: I0130 00:21:31.237627 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" event={"ID":"41b99e9c-eadb-404c-9596-1b102ac85157","Type":"ContainerDied","Data":"734caa87bbc7a31bd529920dc9d2ea498fed57fb22424523a2081de46284edd7"}
Jan 30 00:21:31 crc kubenswrapper[4885]: I0130 00:21:31.238169 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" event={"ID":"41b99e9c-eadb-404c-9596-1b102ac85157","Type":"ContainerStarted","Data":"f5eaf40abf3a6366dfcb4c81b4d066fc28142bbf2650eea588add5b1620b36e0"}
Jan 30 00:21:31 crc kubenswrapper[4885]: I0130 00:21:31.238193 4885 scope.go:117] "RemoveContainer" containerID="b2dbc872a4bd6c3cbe912a7eb4ccfad14a5c3be1740b07b8e8ff70733d344978"
Jan 30 00:21:44 crc kubenswrapper[4885]: E0130 00:21:44.152411 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc"
Jan 30 00:21:59 crc kubenswrapper[4885]: E0130 00:21:59.385872 4885 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb: pinging container registry registry.connect.redhat.com: Get \"https://registry.connect.redhat.com/v2/\": dial tcp: lookup registry.connect.redhat.com on 199.204.47.54:53: server misbehaving" image="registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb"
Jan 30 00:21:59 crc kubenswrapper[4885]: E0130 00:21:59.387056 4885 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:pull,Image:registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb,Command:[/util/cpb /bundle],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:bundle,ReadOnly:false,MountPath:/bundle,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:util,ReadOnly:false,MountPath:/util,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zhnnm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq_openshift-marketplace(75b8ecd5-1943-4af8-82ad-cbe8578ad0cc): ErrImagePull: initializing source docker://registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb: pinging container registry registry.connect.redhat.com: Get \"https://registry.connect.redhat.com/v2/\": dial tcp: lookup registry.connect.redhat.com on 199.204.47.54:53: server misbehaving" logger="UnhandledError"
Jan 30 00:21:59 crc kubenswrapper[4885]: E0130 00:21:59.388432 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ErrImagePull: \"initializing source docker://registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb: pinging container registry registry.connect.redhat.com: Get \\\"https://registry.connect.redhat.com/v2/\\\": dial tcp: lookup registry.connect.redhat.com on 199.204.47.54:53: server misbehaving\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc"
Jan 30 00:22:12 crc kubenswrapper[4885]: E0130 00:22:12.150960 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc"
Jan 30 00:22:27 crc kubenswrapper[4885]: E0130 00:22:27.144587 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc"
Jan 30 00:22:40 crc kubenswrapper[4885]: E0130 00:22:40.145726 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc"
Jan 30 00:22:54 crc kubenswrapper[4885]: E0130 00:22:54.147236 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc"
Jan 30 00:23:05 crc kubenswrapper[4885]: E0130 00:23:05.144484 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc"
Jan 30 00:23:17 crc kubenswrapper[4885]: E0130 00:23:17.144754 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc"
Jan 30 00:23:30 crc kubenswrapper[4885]: I0130 00:23:30.144494 4885 patch_prober.go:28] interesting pod/machine-config-daemon-bmd5j container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 30 00:23:30 crc kubenswrapper[4885]: I0130 00:23:30.145212 4885 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 00:23:32 crc kubenswrapper[4885]: E0130 00:23:32.388732 4885 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb: pinging container registry registry.connect.redhat.com: Get \"https://registry.connect.redhat.com/v2/\": dial tcp: lookup registry.connect.redhat.com on 199.204.47.54:53: server misbehaving" image="registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb"
Jan 30 00:23:32 crc kubenswrapper[4885]: E0130 00:23:32.390208 4885 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:pull,Image:registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb,Command:[/util/cpb /bundle],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:bundle,ReadOnly:false,MountPath:/bundle,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:util,ReadOnly:false,MountPath:/util,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zhnnm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq_openshift-marketplace(75b8ecd5-1943-4af8-82ad-cbe8578ad0cc): ErrImagePull: initializing source docker://registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb: pinging container registry registry.connect.redhat.com: Get \"https://registry.connect.redhat.com/v2/\": dial tcp: lookup registry.connect.redhat.com on 199.204.47.54:53: server misbehaving" logger="UnhandledError"
Jan 30 00:23:32 crc kubenswrapper[4885]: E0130 00:23:32.391650 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ErrImagePull: \"initializing source docker://registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb: pinging container registry registry.connect.redhat.com: Get \\\"https://registry.connect.redhat.com/v2/\\\": dial tcp: lookup registry.connect.redhat.com on 199.204.47.54:53: server misbehaving\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc"
Jan 30 00:23:41 crc kubenswrapper[4885]: I0130 00:23:41.089741 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-mtzwm/must-gather-ptg4v"]
Jan 30 00:23:41 crc kubenswrapper[4885]: I0130 00:23:41.092870 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mtzwm/must-gather-ptg4v"
Jan 30 00:23:41 crc kubenswrapper[4885]: I0130 00:23:41.096126 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-mtzwm"/"openshift-service-ca.crt"
Jan 30 00:23:41 crc kubenswrapper[4885]: I0130 00:23:41.100628 4885 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-mtzwm"/"kube-root-ca.crt"
Jan 30 00:23:41 crc kubenswrapper[4885]: I0130 00:23:41.108308 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-mtzwm/must-gather-ptg4v"]
Jan 30 00:23:41 crc kubenswrapper[4885]: I0130 00:23:41.139563 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e0b124f1-8033-47b8-a363-1f25643c84b0-must-gather-output\") pod \"must-gather-ptg4v\" (UID: \"e0b124f1-8033-47b8-a363-1f25643c84b0\") " pod="openshift-must-gather-mtzwm/must-gather-ptg4v"
Jan 30 00:23:41 crc kubenswrapper[4885]: I0130 00:23:41.139621 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trfhx\" (UniqueName: \"kubernetes.io/projected/e0b124f1-8033-47b8-a363-1f25643c84b0-kube-api-access-trfhx\") pod \"must-gather-ptg4v\" (UID: \"e0b124f1-8033-47b8-a363-1f25643c84b0\") " pod="openshift-must-gather-mtzwm/must-gather-ptg4v"
Jan 30 00:23:41 crc kubenswrapper[4885]: I0130 00:23:41.240722 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e0b124f1-8033-47b8-a363-1f25643c84b0-must-gather-output\") pod \"must-gather-ptg4v\" (UID: \"e0b124f1-8033-47b8-a363-1f25643c84b0\") " pod="openshift-must-gather-mtzwm/must-gather-ptg4v"
Jan 30 00:23:41 crc kubenswrapper[4885]: I0130 00:23:41.240786 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trfhx\" (UniqueName: \"kubernetes.io/projected/e0b124f1-8033-47b8-a363-1f25643c84b0-kube-api-access-trfhx\") pod \"must-gather-ptg4v\" (UID: \"e0b124f1-8033-47b8-a363-1f25643c84b0\") " pod="openshift-must-gather-mtzwm/must-gather-ptg4v"
Jan 30 00:23:41 crc kubenswrapper[4885]: I0130 00:23:41.241179 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e0b124f1-8033-47b8-a363-1f25643c84b0-must-gather-output\") pod \"must-gather-ptg4v\" (UID: \"e0b124f1-8033-47b8-a363-1f25643c84b0\") " pod="openshift-must-gather-mtzwm/must-gather-ptg4v"
Jan 30 00:23:41 crc kubenswrapper[4885]: I0130 00:23:41.262475 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trfhx\" (UniqueName: \"kubernetes.io/projected/e0b124f1-8033-47b8-a363-1f25643c84b0-kube-api-access-trfhx\") pod \"must-gather-ptg4v\" (UID: \"e0b124f1-8033-47b8-a363-1f25643c84b0\") " pod="openshift-must-gather-mtzwm/must-gather-ptg4v"
Jan 30 00:23:41 crc kubenswrapper[4885]: I0130 00:23:41.412868 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mtzwm/must-gather-ptg4v"
Jan 30 00:23:41 crc kubenswrapper[4885]: I0130 00:23:41.694819 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-mtzwm/must-gather-ptg4v"]
Jan 30 00:23:41 crc kubenswrapper[4885]: W0130 00:23:41.701146 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0b124f1_8033_47b8_a363_1f25643c84b0.slice/crio-05726fff17e97f34f1ba134c283f12862db0a6f1e708a6ef351bcac2eea4c99b WatchSource:0}: Error finding container 05726fff17e97f34f1ba134c283f12862db0a6f1e708a6ef351bcac2eea4c99b: Status 404 returned error can't find the container with id 05726fff17e97f34f1ba134c283f12862db0a6f1e708a6ef351bcac2eea4c99b
Jan 30 00:23:42 crc kubenswrapper[4885]: I0130 00:23:42.153392 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mtzwm/must-gather-ptg4v" event={"ID":"e0b124f1-8033-47b8-a363-1f25643c84b0","Type":"ContainerStarted","Data":"05726fff17e97f34f1ba134c283f12862db0a6f1e708a6ef351bcac2eea4c99b"}
Jan 30 00:23:46 crc kubenswrapper[4885]: I0130 00:23:46.043378 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vchrc"]
Jan 30 00:23:46 crc kubenswrapper[4885]: I0130 00:23:46.045857 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vchrc"
Jan 30 00:23:46 crc kubenswrapper[4885]: I0130 00:23:46.051368 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vchrc"]
Jan 30 00:23:46 crc kubenswrapper[4885]: I0130 00:23:46.120189 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f43fe535-6e9d-40e2-83a6-83aa2c4c488b-utilities\") pod \"community-operators-vchrc\" (UID: \"f43fe535-6e9d-40e2-83a6-83aa2c4c488b\") " pod="openshift-marketplace/community-operators-vchrc"
Jan 30 00:23:46 crc kubenswrapper[4885]: I0130 00:23:46.120237 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f43fe535-6e9d-40e2-83a6-83aa2c4c488b-catalog-content\") pod \"community-operators-vchrc\" (UID: \"f43fe535-6e9d-40e2-83a6-83aa2c4c488b\") " pod="openshift-marketplace/community-operators-vchrc"
Jan 30 00:23:46 crc kubenswrapper[4885]: I0130 00:23:46.120266 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rw5jn\" (UniqueName: \"kubernetes.io/projected/f43fe535-6e9d-40e2-83a6-83aa2c4c488b-kube-api-access-rw5jn\") pod \"community-operators-vchrc\" (UID: \"f43fe535-6e9d-40e2-83a6-83aa2c4c488b\") " pod="openshift-marketplace/community-operators-vchrc"
Jan 30 00:23:46 crc kubenswrapper[4885]: I0130 00:23:46.222097 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f43fe535-6e9d-40e2-83a6-83aa2c4c488b-utilities\") pod \"community-operators-vchrc\" (UID: \"f43fe535-6e9d-40e2-83a6-83aa2c4c488b\") " pod="openshift-marketplace/community-operators-vchrc"
Jan 30 00:23:46 crc kubenswrapper[4885]: I0130 00:23:46.222152 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f43fe535-6e9d-40e2-83a6-83aa2c4c488b-catalog-content\") pod \"community-operators-vchrc\" (UID: \"f43fe535-6e9d-40e2-83a6-83aa2c4c488b\") " pod="openshift-marketplace/community-operators-vchrc"
Jan 30 00:23:46 crc kubenswrapper[4885]: I0130 00:23:46.222174 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rw5jn\" (UniqueName: \"kubernetes.io/projected/f43fe535-6e9d-40e2-83a6-83aa2c4c488b-kube-api-access-rw5jn\") pod \"community-operators-vchrc\" (UID: \"f43fe535-6e9d-40e2-83a6-83aa2c4c488b\") " pod="openshift-marketplace/community-operators-vchrc"
Jan 30 00:23:46 crc kubenswrapper[4885]: I0130 00:23:46.222609 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f43fe535-6e9d-40e2-83a6-83aa2c4c488b-utilities\") pod \"community-operators-vchrc\" (UID: \"f43fe535-6e9d-40e2-83a6-83aa2c4c488b\") " pod="openshift-marketplace/community-operators-vchrc"
Jan 30 00:23:46 crc kubenswrapper[4885]: I0130 00:23:46.222783 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f43fe535-6e9d-40e2-83a6-83aa2c4c488b-catalog-content\") pod \"community-operators-vchrc\" (UID: \"f43fe535-6e9d-40e2-83a6-83aa2c4c488b\") " pod="openshift-marketplace/community-operators-vchrc"
Jan 30 00:23:46 crc kubenswrapper[4885]: I0130 00:23:46.241517 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rw5jn\" (UniqueName: \"kubernetes.io/projected/f43fe535-6e9d-40e2-83a6-83aa2c4c488b-kube-api-access-rw5jn\") pod \"community-operators-vchrc\" (UID: \"f43fe535-6e9d-40e2-83a6-83aa2c4c488b\") " pod="openshift-marketplace/community-operators-vchrc"
Jan 30 00:23:46 crc kubenswrapper[4885]: I0130 00:23:46.365032 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vchrc"
Jan 30 00:23:47 crc kubenswrapper[4885]: E0130 00:23:47.143000 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc"
Jan 30 00:23:48 crc kubenswrapper[4885]: I0130 00:23:48.329433 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vchrc"]
Jan 30 00:23:49 crc kubenswrapper[4885]: I0130 00:23:49.237327 4885 generic.go:334] "Generic (PLEG): container finished" podID="f43fe535-6e9d-40e2-83a6-83aa2c4c488b" containerID="6417c234427e7cb902f00ca8c971a011d0e85c07426ccfea90cdea260f81c343" exitCode=0
Jan 30 00:23:49 crc kubenswrapper[4885]: I0130 00:23:49.237401 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vchrc" event={"ID":"f43fe535-6e9d-40e2-83a6-83aa2c4c488b","Type":"ContainerDied","Data":"6417c234427e7cb902f00ca8c971a011d0e85c07426ccfea90cdea260f81c343"}
Jan 30 00:23:49 crc kubenswrapper[4885]: I0130 00:23:49.237428 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vchrc" event={"ID":"f43fe535-6e9d-40e2-83a6-83aa2c4c488b","Type":"ContainerStarted","Data":"43c1887e6428d6769d1c9a99d72d56d3dcd0130739a1c3a5127245061371ae17"}
Jan 30 00:23:49 crc kubenswrapper[4885]: I0130 00:23:49.242478 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mtzwm/must-gather-ptg4v" event={"ID":"e0b124f1-8033-47b8-a363-1f25643c84b0","Type":"ContainerStarted","Data":"3bad1eddcf209152a47c92c0872c7d545a17654e4713ac735981d9f224baed1c"}
Jan 30 00:23:49 crc kubenswrapper[4885]: I0130 00:23:49.242515 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mtzwm/must-gather-ptg4v" event={"ID":"e0b124f1-8033-47b8-a363-1f25643c84b0","Type":"ContainerStarted","Data":"19e29e2c184541a6e6dd1768d5673404081d2c512e33c9bd636856287aa46e00"}
Jan 30 00:23:49 crc kubenswrapper[4885]: I0130 00:23:49.273085 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-mtzwm/must-gather-ptg4v" podStartSLOduration=1.7477242290000001 podStartE2EDuration="8.273070584s" podCreationTimestamp="2026-01-30 00:23:41 +0000 UTC" firstStartedPulling="2026-01-30 00:23:41.703841472 +0000 UTC m=+908.295313220" lastFinishedPulling="2026-01-30 00:23:48.229187817 +0000 UTC m=+914.820659575" observedRunningTime="2026-01-30 00:23:49.272358353 +0000 UTC m=+915.863830121" watchObservedRunningTime="2026-01-30 00:23:49.273070584 +0000 UTC m=+915.864542332"
Jan 30 00:23:51 crc kubenswrapper[4885]: I0130 00:23:51.271494 4885 generic.go:334] "Generic (PLEG): container finished" podID="f43fe535-6e9d-40e2-83a6-83aa2c4c488b" containerID="3441e88ffbe6fcec63baf94f2b88ebfc93c5206d24b3c1d8d8f5eee020898e51" exitCode=0
Jan 30 00:23:51 crc kubenswrapper[4885]: I0130 00:23:51.271587 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vchrc" event={"ID":"f43fe535-6e9d-40e2-83a6-83aa2c4c488b","Type":"ContainerDied","Data":"3441e88ffbe6fcec63baf94f2b88ebfc93c5206d24b3c1d8d8f5eee020898e51"}
Jan 30 00:23:52 crc kubenswrapper[4885]: I0130 00:23:52.279326 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vchrc" event={"ID":"f43fe535-6e9d-40e2-83a6-83aa2c4c488b","Type":"ContainerStarted","Data":"0b8d96139f634871edefc40bab8198ffa60ea921b9a99487bd75885b2799a4c6"}
Jan 30 00:23:52 crc kubenswrapper[4885]: I0130 00:23:52.308928 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vchrc" podStartSLOduration=3.898906586 podStartE2EDuration="6.308910976s" podCreationTimestamp="2026-01-30 00:23:46 +0000 UTC" firstStartedPulling="2026-01-30 00:23:49.239636558 +0000 UTC m=+915.831108316" lastFinishedPulling="2026-01-30 00:23:51.649640938 +0000 UTC m=+918.241112706" observedRunningTime="2026-01-30 00:23:52.30831473 +0000 UTC m=+918.899786478" watchObservedRunningTime="2026-01-30 00:23:52.308910976 +0000 UTC m=+918.900382724"
Jan 30 00:23:55 crc kubenswrapper[4885]: I0130 00:23:55.892476 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-t8p5t"]
Jan 30 00:23:55 crc kubenswrapper[4885]: I0130 00:23:55.893675 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t8p5t"
Jan 30 00:23:55 crc kubenswrapper[4885]: I0130 00:23:55.907705 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t8p5t"]
Jan 30 00:23:55 crc kubenswrapper[4885]: I0130 00:23:55.979903 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3da20ee2-edea-4b96-b4e2-b838abca2581-catalog-content\") pod \"redhat-operators-t8p5t\" (UID: \"3da20ee2-edea-4b96-b4e2-b838abca2581\") " pod="openshift-marketplace/redhat-operators-t8p5t"
Jan 30 00:23:55 crc kubenswrapper[4885]: I0130 00:23:55.980209 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gq7bh\" (UniqueName: \"kubernetes.io/projected/3da20ee2-edea-4b96-b4e2-b838abca2581-kube-api-access-gq7bh\") pod \"redhat-operators-t8p5t\" (UID: \"3da20ee2-edea-4b96-b4e2-b838abca2581\") " pod="openshift-marketplace/redhat-operators-t8p5t"
Jan 30 00:23:55 crc kubenswrapper[4885]: I0130 00:23:55.980355 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3da20ee2-edea-4b96-b4e2-b838abca2581-utilities\") pod \"redhat-operators-t8p5t\" (UID: \"3da20ee2-edea-4b96-b4e2-b838abca2581\") " pod="openshift-marketplace/redhat-operators-t8p5t"
Jan 30 00:23:56 crc kubenswrapper[4885]: I0130 00:23:56.091431 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3da20ee2-edea-4b96-b4e2-b838abca2581-catalog-content\") pod \"redhat-operators-t8p5t\" (UID: \"3da20ee2-edea-4b96-b4e2-b838abca2581\") " pod="openshift-marketplace/redhat-operators-t8p5t"
Jan 30 00:23:56 crc kubenswrapper[4885]: I0130 00:23:56.091748 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gq7bh\" (UniqueName: \"kubernetes.io/projected/3da20ee2-edea-4b96-b4e2-b838abca2581-kube-api-access-gq7bh\") pod \"redhat-operators-t8p5t\" (UID: \"3da20ee2-edea-4b96-b4e2-b838abca2581\") " pod="openshift-marketplace/redhat-operators-t8p5t"
Jan 30 00:23:56 crc kubenswrapper[4885]: I0130 00:23:56.091966 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3da20ee2-edea-4b96-b4e2-b838abca2581-utilities\") pod \"redhat-operators-t8p5t\" (UID: \"3da20ee2-edea-4b96-b4e2-b838abca2581\") " pod="openshift-marketplace/redhat-operators-t8p5t"
Jan 30 00:23:56 crc kubenswrapper[4885]: I0130 00:23:56.092533 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3da20ee2-edea-4b96-b4e2-b838abca2581-catalog-content\") pod \"redhat-operators-t8p5t\" (UID: \"3da20ee2-edea-4b96-b4e2-b838abca2581\") " pod="openshift-marketplace/redhat-operators-t8p5t"
Jan 30 00:23:56 crc kubenswrapper[4885]: I0130 00:23:56.092697 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3da20ee2-edea-4b96-b4e2-b838abca2581-utilities\") pod \"redhat-operators-t8p5t\" (UID: \"3da20ee2-edea-4b96-b4e2-b838abca2581\") " pod="openshift-marketplace/redhat-operators-t8p5t"
Jan 30 00:23:56 crc kubenswrapper[4885]: I0130 00:23:56.109165 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gq7bh\" (UniqueName: \"kubernetes.io/projected/3da20ee2-edea-4b96-b4e2-b838abca2581-kube-api-access-gq7bh\") pod \"redhat-operators-t8p5t\" (UID: \"3da20ee2-edea-4b96-b4e2-b838abca2581\") " pod="openshift-marketplace/redhat-operators-t8p5t"
Jan 30 00:23:56 crc kubenswrapper[4885]: I0130 00:23:56.210108 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t8p5t"
Jan 30 00:23:56 crc kubenswrapper[4885]: I0130 00:23:56.365644 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vchrc"
Jan 30 00:23:56 crc kubenswrapper[4885]: I0130 00:23:56.368230 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vchrc"
Jan 30 00:23:56 crc kubenswrapper[4885]: I0130 00:23:56.418281 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vchrc"
Jan 30 00:23:56 crc kubenswrapper[4885]: I0130 00:23:56.476094 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t8p5t"]
Jan 30 00:23:57 crc kubenswrapper[4885]: I0130 00:23:57.308584 4885 generic.go:334] "Generic (PLEG): container finished" podID="3da20ee2-edea-4b96-b4e2-b838abca2581" containerID="b0888e6149fdad72f64dd4014a85c7a0671b8ab523fe26093ee53088dce76b0a" exitCode=0
Jan 30 00:23:57 crc kubenswrapper[4885]: I0130 00:23:57.308647 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t8p5t" event={"ID":"3da20ee2-edea-4b96-b4e2-b838abca2581","Type":"ContainerDied","Data":"b0888e6149fdad72f64dd4014a85c7a0671b8ab523fe26093ee53088dce76b0a"}
Jan 30 00:23:57 crc kubenswrapper[4885]: I0130 00:23:57.308707 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t8p5t" event={"ID":"3da20ee2-edea-4b96-b4e2-b838abca2581","Type":"ContainerStarted","Data":"984e371c1e0deead6d1836f47ee5f785c8f63c5f982003e667e28f03d3c3a274"}
Jan 30 00:23:57 crc kubenswrapper[4885]: I0130 00:23:57.358120 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vchrc"
Jan 30 00:23:58 crc kubenswrapper[4885]: I0130 00:23:58.317417 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t8p5t" event={"ID":"3da20ee2-edea-4b96-b4e2-b838abca2581","Type":"ContainerStarted","Data":"e4f77bbfeb79fa0aab2c1cba2ae754d3b49984800e26ee3d88d12598aebbb1ba"}
Jan 30 00:23:58 crc kubenswrapper[4885]: I0130 00:23:58.683103 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vchrc"]
Jan 30 00:23:59 crc kubenswrapper[4885]: I0130 00:23:59.327455 4885 generic.go:334] "Generic (PLEG): container finished" podID="3da20ee2-edea-4b96-b4e2-b838abca2581" containerID="e4f77bbfeb79fa0aab2c1cba2ae754d3b49984800e26ee3d88d12598aebbb1ba" exitCode=0
Jan 30 00:23:59 crc kubenswrapper[4885]: I0130 00:23:59.327550 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t8p5t" event={"ID":"3da20ee2-edea-4b96-b4e2-b838abca2581","Type":"ContainerDied","Data":"e4f77bbfeb79fa0aab2c1cba2ae754d3b49984800e26ee3d88d12598aebbb1ba"}
Jan 30 00:24:00 crc kubenswrapper[4885]: E0130 00:24:00.143217 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc"
Jan 30 00:24:00 crc kubenswrapper[4885]: I0130 00:24:00.143590 4885 patch_prober.go:28] interesting pod/machine-config-daemon-bmd5j container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 30 00:24:00 crc kubenswrapper[4885]: I0130 00:24:00.143636 4885 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 30 00:24:00 crc kubenswrapper[4885]: I0130 00:24:00.339916 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t8p5t" event={"ID":"3da20ee2-edea-4b96-b4e2-b838abca2581","Type":"ContainerStarted","Data":"6660a386f948a65026eda99f9665ac464f5928bba0dc602e72b570189839e7a0"}
Jan 30 00:24:00 crc kubenswrapper[4885]: I0130 00:24:00.340065 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vchrc" podUID="f43fe535-6e9d-40e2-83a6-83aa2c4c488b" containerName="registry-server" containerID="cri-o://0b8d96139f634871edefc40bab8198ffa60ea921b9a99487bd75885b2799a4c6" gracePeriod=2
Jan 30 00:24:00 crc kubenswrapper[4885]: I0130 00:24:00.382153 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-t8p5t" podStartSLOduration=2.805924707 podStartE2EDuration="5.382131819s" podCreationTimestamp="2026-01-30 00:23:55 +0000 UTC" firstStartedPulling="2026-01-30 00:23:57.310368824 +0000 UTC m=+923.901840572" lastFinishedPulling="2026-01-30 00:23:59.886575896 +0000 UTC m=+926.478047684" observedRunningTime="2026-01-30 00:24:00.375453379 +0000 UTC m=+926.966925147" watchObservedRunningTime="2026-01-30 00:24:00.382131819 +0000 UTC m=+926.973603587"
Jan 30 00:24:00 crc kubenswrapper[4885]: I0130 00:24:00.718869 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vchrc"
Jan 30 00:24:00 crc kubenswrapper[4885]: I0130 00:24:00.863065 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f43fe535-6e9d-40e2-83a6-83aa2c4c488b-utilities\") pod \"f43fe535-6e9d-40e2-83a6-83aa2c4c488b\" (UID: \"f43fe535-6e9d-40e2-83a6-83aa2c4c488b\") "
Jan 30 00:24:00 crc kubenswrapper[4885]: I0130 00:24:00.863373 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rw5jn\" (UniqueName: \"kubernetes.io/projected/f43fe535-6e9d-40e2-83a6-83aa2c4c488b-kube-api-access-rw5jn\") pod \"f43fe535-6e9d-40e2-83a6-83aa2c4c488b\" (UID: \"f43fe535-6e9d-40e2-83a6-83aa2c4c488b\") "
Jan 30 00:24:00 crc kubenswrapper[4885]: I0130 00:24:00.863433 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f43fe535-6e9d-40e2-83a6-83aa2c4c488b-catalog-content\") pod \"f43fe535-6e9d-40e2-83a6-83aa2c4c488b\" (UID: \"f43fe535-6e9d-40e2-83a6-83aa2c4c488b\") "
Jan 30 00:24:00 crc kubenswrapper[4885]: I0130 00:24:00.864091 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f43fe535-6e9d-40e2-83a6-83aa2c4c488b-utilities" (OuterVolumeSpecName: "utilities") pod "f43fe535-6e9d-40e2-83a6-83aa2c4c488b" (UID: "f43fe535-6e9d-40e2-83a6-83aa2c4c488b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 00:24:00 crc kubenswrapper[4885]: I0130 00:24:00.869871 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f43fe535-6e9d-40e2-83a6-83aa2c4c488b-kube-api-access-rw5jn" (OuterVolumeSpecName: "kube-api-access-rw5jn") pod "f43fe535-6e9d-40e2-83a6-83aa2c4c488b" (UID: "f43fe535-6e9d-40e2-83a6-83aa2c4c488b"). InnerVolumeSpecName "kube-api-access-rw5jn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 30 00:24:00 crc kubenswrapper[4885]: I0130 00:24:00.964574 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rw5jn\" (UniqueName: \"kubernetes.io/projected/f43fe535-6e9d-40e2-83a6-83aa2c4c488b-kube-api-access-rw5jn\") on node \"crc\" DevicePath \"\""
Jan 30 00:24:00 crc kubenswrapper[4885]: I0130 00:24:00.964613 4885 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f43fe535-6e9d-40e2-83a6-83aa2c4c488b-utilities\") on node \"crc\" DevicePath \"\""
Jan 30 00:24:01 crc kubenswrapper[4885]: I0130 00:24:01.005677 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f43fe535-6e9d-40e2-83a6-83aa2c4c488b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f43fe535-6e9d-40e2-83a6-83aa2c4c488b" (UID: "f43fe535-6e9d-40e2-83a6-83aa2c4c488b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 30 00:24:01 crc kubenswrapper[4885]: I0130 00:24:01.066196 4885 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f43fe535-6e9d-40e2-83a6-83aa2c4c488b-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 30 00:24:01 crc kubenswrapper[4885]: I0130 00:24:01.347394 4885 generic.go:334] "Generic (PLEG): container finished" podID="f43fe535-6e9d-40e2-83a6-83aa2c4c488b" containerID="0b8d96139f634871edefc40bab8198ffa60ea921b9a99487bd75885b2799a4c6" exitCode=0
Jan 30 00:24:01 crc kubenswrapper[4885]: I0130 00:24:01.347459 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vchrc"
Jan 30 00:24:01 crc kubenswrapper[4885]: I0130 00:24:01.347477 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vchrc" event={"ID":"f43fe535-6e9d-40e2-83a6-83aa2c4c488b","Type":"ContainerDied","Data":"0b8d96139f634871edefc40bab8198ffa60ea921b9a99487bd75885b2799a4c6"}
Jan 30 00:24:01 crc kubenswrapper[4885]: I0130 00:24:01.347521 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vchrc" event={"ID":"f43fe535-6e9d-40e2-83a6-83aa2c4c488b","Type":"ContainerDied","Data":"43c1887e6428d6769d1c9a99d72d56d3dcd0130739a1c3a5127245061371ae17"}
Jan 30 00:24:01 crc kubenswrapper[4885]: I0130 00:24:01.347543 4885 scope.go:117] "RemoveContainer" containerID="0b8d96139f634871edefc40bab8198ffa60ea921b9a99487bd75885b2799a4c6"
Jan 30 00:24:01 crc kubenswrapper[4885]: I0130 00:24:01.379221 4885 scope.go:117] "RemoveContainer" containerID="3441e88ffbe6fcec63baf94f2b88ebfc93c5206d24b3c1d8d8f5eee020898e51"
Jan 30 00:24:01 crc kubenswrapper[4885]: I0130 00:24:01.381666 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vchrc"]
Jan 30 00:24:01 crc kubenswrapper[4885]: I0130 00:24:01.388814 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vchrc"]
Jan 30 00:24:01 crc kubenswrapper[4885]: I0130 00:24:01.409882 4885 scope.go:117] "RemoveContainer" containerID="6417c234427e7cb902f00ca8c971a011d0e85c07426ccfea90cdea260f81c343"
Jan 30 00:24:01 crc kubenswrapper[4885]: I0130 00:24:01.441081 4885 scope.go:117] "RemoveContainer" containerID="0b8d96139f634871edefc40bab8198ffa60ea921b9a99487bd75885b2799a4c6"
Jan 30 00:24:01 crc kubenswrapper[4885]: E0130 00:24:01.442058 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b8d96139f634871edefc40bab8198ffa60ea921b9a99487bd75885b2799a4c6\": container with ID starting with 0b8d96139f634871edefc40bab8198ffa60ea921b9a99487bd75885b2799a4c6 not found: ID does not exist" containerID="0b8d96139f634871edefc40bab8198ffa60ea921b9a99487bd75885b2799a4c6"
Jan 30 00:24:01 crc kubenswrapper[4885]: I0130 00:24:01.442159 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b8d96139f634871edefc40bab8198ffa60ea921b9a99487bd75885b2799a4c6"} err="failed to get container status \"0b8d96139f634871edefc40bab8198ffa60ea921b9a99487bd75885b2799a4c6\": rpc error: code = NotFound desc = could not find container \"0b8d96139f634871edefc40bab8198ffa60ea921b9a99487bd75885b2799a4c6\": container with ID starting with 0b8d96139f634871edefc40bab8198ffa60ea921b9a99487bd75885b2799a4c6 not found: ID does not exist"
Jan 30 00:24:01 crc kubenswrapper[4885]: I0130 00:24:01.442193 4885 scope.go:117] "RemoveContainer" containerID="3441e88ffbe6fcec63baf94f2b88ebfc93c5206d24b3c1d8d8f5eee020898e51"
Jan 30 00:24:01 crc kubenswrapper[4885]: E0130 00:24:01.443094 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3441e88ffbe6fcec63baf94f2b88ebfc93c5206d24b3c1d8d8f5eee020898e51\": container with ID starting with 3441e88ffbe6fcec63baf94f2b88ebfc93c5206d24b3c1d8d8f5eee020898e51 not found: ID does not exist" containerID="3441e88ffbe6fcec63baf94f2b88ebfc93c5206d24b3c1d8d8f5eee020898e51"
Jan 30 00:24:01 crc kubenswrapper[4885]: I0130 00:24:01.443140 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3441e88ffbe6fcec63baf94f2b88ebfc93c5206d24b3c1d8d8f5eee020898e51"} err="failed to get container status \"3441e88ffbe6fcec63baf94f2b88ebfc93c5206d24b3c1d8d8f5eee020898e51\": rpc error: code = NotFound desc = could not find container \"3441e88ffbe6fcec63baf94f2b88ebfc93c5206d24b3c1d8d8f5eee020898e51\": container with ID starting with 3441e88ffbe6fcec63baf94f2b88ebfc93c5206d24b3c1d8d8f5eee020898e51 not found: ID does not exist"
Jan 30 00:24:01 crc kubenswrapper[4885]: I0130 00:24:01.443170 4885 scope.go:117] "RemoveContainer" containerID="6417c234427e7cb902f00ca8c971a011d0e85c07426ccfea90cdea260f81c343"
Jan 30 00:24:01 crc kubenswrapper[4885]: E0130 00:24:01.443427 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6417c234427e7cb902f00ca8c971a011d0e85c07426ccfea90cdea260f81c343\": container with ID starting with 6417c234427e7cb902f00ca8c971a011d0e85c07426ccfea90cdea260f81c343 not found: ID does not exist" containerID="6417c234427e7cb902f00ca8c971a011d0e85c07426ccfea90cdea260f81c343"
Jan 30 00:24:01 crc kubenswrapper[4885]: I0130 00:24:01.443459 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6417c234427e7cb902f00ca8c971a011d0e85c07426ccfea90cdea260f81c343"} err="failed to get container status \"6417c234427e7cb902f00ca8c971a011d0e85c07426ccfea90cdea260f81c343\": rpc error: code = NotFound desc = could not find container \"6417c234427e7cb902f00ca8c971a011d0e85c07426ccfea90cdea260f81c343\": container with ID starting with 6417c234427e7cb902f00ca8c971a011d0e85c07426ccfea90cdea260f81c343 not found: ID does not exist"
Jan 30 00:24:02 crc kubenswrapper[4885]: I0130 00:24:02.149976 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f43fe535-6e9d-40e2-83a6-83aa2c4c488b" path="/var/lib/kubelet/pods/f43fe535-6e9d-40e2-83a6-83aa2c4c488b/volumes"
Jan 30 00:24:06 crc kubenswrapper[4885]: I0130 00:24:06.210909 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-t8p5t"
Jan 30 00:24:06 crc kubenswrapper[4885]: I0130 00:24:06.211395 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-t8p5t"
Jan 30 00:24:07 crc kubenswrapper[4885]: I0130 00:24:07.246861 4885 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-t8p5t" podUID="3da20ee2-edea-4b96-b4e2-b838abca2581" containerName="registry-server" probeResult="failure" output=<
Jan 30 00:24:07 crc kubenswrapper[4885]: timeout: failed to connect service ":50051" within 1s
Jan 30 00:24:07 crc kubenswrapper[4885]: >
Jan 30 00:24:10 crc kubenswrapper[4885]: I0130 00:24:10.552442 4885 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2wtdr"]
Jan 30 00:24:10 crc kubenswrapper[4885]: E0130 00:24:10.553034 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f43fe535-6e9d-40e2-83a6-83aa2c4c488b" containerName="registry-server"
Jan 30 00:24:10 crc kubenswrapper[4885]: I0130 00:24:10.553050 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="f43fe535-6e9d-40e2-83a6-83aa2c4c488b" containerName="registry-server"
Jan 30 00:24:10 crc kubenswrapper[4885]: E0130 00:24:10.553060 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f43fe535-6e9d-40e2-83a6-83aa2c4c488b" containerName="extract-content"
Jan 30 00:24:10 crc kubenswrapper[4885]: I0130 00:24:10.553067 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="f43fe535-6e9d-40e2-83a6-83aa2c4c488b" containerName="extract-content"
Jan 30 00:24:10 crc kubenswrapper[4885]: E0130 00:24:10.553086 4885 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f43fe535-6e9d-40e2-83a6-83aa2c4c488b" containerName="extract-utilities"
Jan 30 00:24:10 crc kubenswrapper[4885]: I0130 00:24:10.553094 4885 state_mem.go:107] "Deleted CPUSet assignment" podUID="f43fe535-6e9d-40e2-83a6-83aa2c4c488b" containerName="extract-utilities"
Jan 30 00:24:10 crc kubenswrapper[4885]: I0130 00:24:10.553225 4885 memory_manager.go:354] "RemoveStaleState removing state" podUID="f43fe535-6e9d-40e2-83a6-83aa2c4c488b" containerName="registry-server"
Jan 30 00:24:10 crc kubenswrapper[4885]: I0130 00:24:10.554065 4885 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2wtdr"
Jan 30 00:24:10 crc kubenswrapper[4885]: I0130 00:24:10.567548 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2wtdr"]
Jan 30 00:24:10 crc kubenswrapper[4885]: I0130 00:24:10.663883 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/969ff0a8-3372-4250-befc-6f0f6d4232ac-catalog-content\") pod \"certified-operators-2wtdr\" (UID: \"969ff0a8-3372-4250-befc-6f0f6d4232ac\") " pod="openshift-marketplace/certified-operators-2wtdr"
Jan 30 00:24:10 crc kubenswrapper[4885]: I0130 00:24:10.663933 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/969ff0a8-3372-4250-befc-6f0f6d4232ac-utilities\") pod \"certified-operators-2wtdr\" (UID: \"969ff0a8-3372-4250-befc-6f0f6d4232ac\") " pod="openshift-marketplace/certified-operators-2wtdr"
Jan 30 00:24:10 crc kubenswrapper[4885]: I0130 00:24:10.663959 4885 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7qxf\" (UniqueName: \"kubernetes.io/projected/969ff0a8-3372-4250-befc-6f0f6d4232ac-kube-api-access-z7qxf\") pod \"certified-operators-2wtdr\" (UID: \"969ff0a8-3372-4250-befc-6f0f6d4232ac\") " pod="openshift-marketplace/certified-operators-2wtdr"
Jan 30 00:24:10 crc kubenswrapper[4885]: I0130 00:24:10.765386 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/969ff0a8-3372-4250-befc-6f0f6d4232ac-catalog-content\") pod \"certified-operators-2wtdr\" (UID: \"969ff0a8-3372-4250-befc-6f0f6d4232ac\") " pod="openshift-marketplace/certified-operators-2wtdr"
Jan 30 00:24:10 crc
kubenswrapper[4885]: I0130 00:24:10.765427 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/969ff0a8-3372-4250-befc-6f0f6d4232ac-utilities\") pod \"certified-operators-2wtdr\" (UID: \"969ff0a8-3372-4250-befc-6f0f6d4232ac\") " pod="openshift-marketplace/certified-operators-2wtdr" Jan 30 00:24:10 crc kubenswrapper[4885]: I0130 00:24:10.765448 4885 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7qxf\" (UniqueName: \"kubernetes.io/projected/969ff0a8-3372-4250-befc-6f0f6d4232ac-kube-api-access-z7qxf\") pod \"certified-operators-2wtdr\" (UID: \"969ff0a8-3372-4250-befc-6f0f6d4232ac\") " pod="openshift-marketplace/certified-operators-2wtdr" Jan 30 00:24:10 crc kubenswrapper[4885]: I0130 00:24:10.766140 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/969ff0a8-3372-4250-befc-6f0f6d4232ac-catalog-content\") pod \"certified-operators-2wtdr\" (UID: \"969ff0a8-3372-4250-befc-6f0f6d4232ac\") " pod="openshift-marketplace/certified-operators-2wtdr" Jan 30 00:24:10 crc kubenswrapper[4885]: I0130 00:24:10.766340 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/969ff0a8-3372-4250-befc-6f0f6d4232ac-utilities\") pod \"certified-operators-2wtdr\" (UID: \"969ff0a8-3372-4250-befc-6f0f6d4232ac\") " pod="openshift-marketplace/certified-operators-2wtdr" Jan 30 00:24:10 crc kubenswrapper[4885]: I0130 00:24:10.798568 4885 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7qxf\" (UniqueName: \"kubernetes.io/projected/969ff0a8-3372-4250-befc-6f0f6d4232ac-kube-api-access-z7qxf\") pod \"certified-operators-2wtdr\" (UID: \"969ff0a8-3372-4250-befc-6f0f6d4232ac\") " pod="openshift-marketplace/certified-operators-2wtdr" Jan 30 00:24:10 crc kubenswrapper[4885]: I0130 00:24:10.870075 4885 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2wtdr" Jan 30 00:24:11 crc kubenswrapper[4885]: E0130 00:24:11.143120 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" Jan 30 00:24:11 crc kubenswrapper[4885]: I0130 00:24:11.232906 4885 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2wtdr"] Jan 30 00:24:11 crc kubenswrapper[4885]: W0130 00:24:11.237781 4885 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod969ff0a8_3372_4250_befc_6f0f6d4232ac.slice/crio-51633575c29b3b05e874753bbc0f0039f5ec2c165af752ca066c759ea2d70e52 WatchSource:0}: Error finding container 51633575c29b3b05e874753bbc0f0039f5ec2c165af752ca066c759ea2d70e52: Status 404 returned error can't find the container with id 51633575c29b3b05e874753bbc0f0039f5ec2c165af752ca066c759ea2d70e52 Jan 30 00:24:11 crc kubenswrapper[4885]: I0130 00:24:11.434073 4885 generic.go:334] "Generic (PLEG): container finished" podID="969ff0a8-3372-4250-befc-6f0f6d4232ac" containerID="d38511737a2e4acd729469f75935a60c7e51c27e391d3ef83b186a901d6f7902" exitCode=0 Jan 30 00:24:11 crc kubenswrapper[4885]: I0130 00:24:11.434137 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2wtdr" event={"ID":"969ff0a8-3372-4250-befc-6f0f6d4232ac","Type":"ContainerDied","Data":"d38511737a2e4acd729469f75935a60c7e51c27e391d3ef83b186a901d6f7902"} Jan 30 00:24:11 crc kubenswrapper[4885]: I0130 00:24:11.434166 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2wtdr" event={"ID":"969ff0a8-3372-4250-befc-6f0f6d4232ac","Type":"ContainerStarted","Data":"51633575c29b3b05e874753bbc0f0039f5ec2c165af752ca066c759ea2d70e52"} Jan 30 00:24:12 crc kubenswrapper[4885]: I0130 00:24:12.442312 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2wtdr" event={"ID":"969ff0a8-3372-4250-befc-6f0f6d4232ac","Type":"ContainerStarted","Data":"a1d4c17175cee557d35b92fd78a71569749fb5bb5bd18326ddf5982da7192af5"} Jan 30 00:24:13 crc kubenswrapper[4885]: I0130 00:24:13.451604 4885 generic.go:334] "Generic (PLEG): container finished" podID="969ff0a8-3372-4250-befc-6f0f6d4232ac" containerID="a1d4c17175cee557d35b92fd78a71569749fb5bb5bd18326ddf5982da7192af5" exitCode=0 Jan 30 00:24:13 crc kubenswrapper[4885]: I0130 00:24:13.451671 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2wtdr" event={"ID":"969ff0a8-3372-4250-befc-6f0f6d4232ac","Type":"ContainerDied","Data":"a1d4c17175cee557d35b92fd78a71569749fb5bb5bd18326ddf5982da7192af5"} Jan 30 00:24:14 crc kubenswrapper[4885]: I0130 00:24:14.459498 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2wtdr" event={"ID":"969ff0a8-3372-4250-befc-6f0f6d4232ac","Type":"ContainerStarted","Data":"d4aa724f7ced62bc9a67caa15345ba5bc472b425f87477c108a400a4be018b23"} Jan 30 00:24:14 crc kubenswrapper[4885]: I0130 00:24:14.477965 4885 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/certified-operators-2wtdr" podStartSLOduration=1.977734008 podStartE2EDuration="4.477943142s" podCreationTimestamp="2026-01-30 00:24:10 +0000 UTC" firstStartedPulling="2026-01-30 00:24:11.435453558 +0000 UTC m=+938.026925316" lastFinishedPulling="2026-01-30 00:24:13.935662662 +0000 UTC m=+940.527134450" observedRunningTime="2026-01-30 00:24:14.475080784 +0000 UTC m=+941.066552532" watchObservedRunningTime="2026-01-30 00:24:14.477943142 +0000 UTC m=+941.069414890" Jan 30 00:24:16 crc kubenswrapper[4885]: I0130 00:24:16.271503 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-t8p5t" Jan 30 00:24:16 crc kubenswrapper[4885]: I0130 00:24:16.326086 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-t8p5t" Jan 30 00:24:17 crc kubenswrapper[4885]: I0130 00:24:17.932394 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-t8p5t"] Jan 30 00:24:17 crc kubenswrapper[4885]: I0130 00:24:17.933251 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-t8p5t" podUID="3da20ee2-edea-4b96-b4e2-b838abca2581" containerName="registry-server" containerID="cri-o://6660a386f948a65026eda99f9665ac464f5928bba0dc602e72b570189839e7a0" gracePeriod=2 Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.292034 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t8p5t" Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.371026 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gq7bh\" (UniqueName: \"kubernetes.io/projected/3da20ee2-edea-4b96-b4e2-b838abca2581-kube-api-access-gq7bh\") pod \"3da20ee2-edea-4b96-b4e2-b838abca2581\" (UID: \"3da20ee2-edea-4b96-b4e2-b838abca2581\") " Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.371082 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3da20ee2-edea-4b96-b4e2-b838abca2581-catalog-content\") pod \"3da20ee2-edea-4b96-b4e2-b838abca2581\" (UID: \"3da20ee2-edea-4b96-b4e2-b838abca2581\") " Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.371122 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3da20ee2-edea-4b96-b4e2-b838abca2581-utilities\") pod \"3da20ee2-edea-4b96-b4e2-b838abca2581\" (UID: \"3da20ee2-edea-4b96-b4e2-b838abca2581\") " Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.371946 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3da20ee2-edea-4b96-b4e2-b838abca2581-utilities" (OuterVolumeSpecName: "utilities") pod "3da20ee2-edea-4b96-b4e2-b838abca2581" (UID: "3da20ee2-edea-4b96-b4e2-b838abca2581"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.377714 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3da20ee2-edea-4b96-b4e2-b838abca2581-kube-api-access-gq7bh" (OuterVolumeSpecName: "kube-api-access-gq7bh") pod "3da20ee2-edea-4b96-b4e2-b838abca2581" (UID: "3da20ee2-edea-4b96-b4e2-b838abca2581"). InnerVolumeSpecName "kube-api-access-gq7bh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.472386 4885 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3da20ee2-edea-4b96-b4e2-b838abca2581-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.472415 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gq7bh\" (UniqueName: \"kubernetes.io/projected/3da20ee2-edea-4b96-b4e2-b838abca2581-kube-api-access-gq7bh\") on node \"crc\" DevicePath \"\"" Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.486383 4885 generic.go:334] "Generic (PLEG): container finished" podID="3da20ee2-edea-4b96-b4e2-b838abca2581" containerID="6660a386f948a65026eda99f9665ac464f5928bba0dc602e72b570189839e7a0" exitCode=0 Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.486431 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t8p5t" event={"ID":"3da20ee2-edea-4b96-b4e2-b838abca2581","Type":"ContainerDied","Data":"6660a386f948a65026eda99f9665ac464f5928bba0dc602e72b570189839e7a0"} Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.486443 4885 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t8p5t" Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.486461 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t8p5t" event={"ID":"3da20ee2-edea-4b96-b4e2-b838abca2581","Type":"ContainerDied","Data":"984e371c1e0deead6d1836f47ee5f785c8f63c5f982003e667e28f03d3c3a274"} Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.486479 4885 scope.go:117] "RemoveContainer" containerID="6660a386f948a65026eda99f9665ac464f5928bba0dc602e72b570189839e7a0" Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.499189 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3da20ee2-edea-4b96-b4e2-b838abca2581-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3da20ee2-edea-4b96-b4e2-b838abca2581" (UID: "3da20ee2-edea-4b96-b4e2-b838abca2581"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.503916 4885 scope.go:117] "RemoveContainer" containerID="e4f77bbfeb79fa0aab2c1cba2ae754d3b49984800e26ee3d88d12598aebbb1ba" Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.521008 4885 scope.go:117] "RemoveContainer" containerID="b0888e6149fdad72f64dd4014a85c7a0671b8ab523fe26093ee53088dce76b0a" Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.556271 4885 scope.go:117] "RemoveContainer" containerID="6660a386f948a65026eda99f9665ac464f5928bba0dc602e72b570189839e7a0" Jan 30 00:24:18 crc kubenswrapper[4885]: E0130 00:24:18.557907 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6660a386f948a65026eda99f9665ac464f5928bba0dc602e72b570189839e7a0\": container with ID starting with 6660a386f948a65026eda99f9665ac464f5928bba0dc602e72b570189839e7a0 not found: ID does not exist" containerID="6660a386f948a65026eda99f9665ac464f5928bba0dc602e72b570189839e7a0" Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.557952 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6660a386f948a65026eda99f9665ac464f5928bba0dc602e72b570189839e7a0"} err="failed to get container status \"6660a386f948a65026eda99f9665ac464f5928bba0dc602e72b570189839e7a0\": rpc error: code = NotFound desc = could not find container \"6660a386f948a65026eda99f9665ac464f5928bba0dc602e72b570189839e7a0\": container with ID starting with 6660a386f948a65026eda99f9665ac464f5928bba0dc602e72b570189839e7a0 not found: ID does not exist" Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.557985 4885 scope.go:117] "RemoveContainer" containerID="e4f77bbfeb79fa0aab2c1cba2ae754d3b49984800e26ee3d88d12598aebbb1ba" Jan 30 00:24:18 crc kubenswrapper[4885]: E0130 00:24:18.558296 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4f77bbfeb79fa0aab2c1cba2ae754d3b49984800e26ee3d88d12598aebbb1ba\": container with ID starting with e4f77bbfeb79fa0aab2c1cba2ae754d3b49984800e26ee3d88d12598aebbb1ba not found: ID does not exist" containerID="e4f77bbfeb79fa0aab2c1cba2ae754d3b49984800e26ee3d88d12598aebbb1ba" Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.558329 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4f77bbfeb79fa0aab2c1cba2ae754d3b49984800e26ee3d88d12598aebbb1ba"} err="failed to get container status \"e4f77bbfeb79fa0aab2c1cba2ae754d3b49984800e26ee3d88d12598aebbb1ba\": rpc error: code = NotFound desc = could not find container \"e4f77bbfeb79fa0aab2c1cba2ae754d3b49984800e26ee3d88d12598aebbb1ba\": container with ID starting with e4f77bbfeb79fa0aab2c1cba2ae754d3b49984800e26ee3d88d12598aebbb1ba not found: ID does not exist" Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.558351 4885 scope.go:117] "RemoveContainer" containerID="b0888e6149fdad72f64dd4014a85c7a0671b8ab523fe26093ee53088dce76b0a" Jan 30 00:24:18 crc kubenswrapper[4885]: E0130 00:24:18.560342 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0888e6149fdad72f64dd4014a85c7a0671b8ab523fe26093ee53088dce76b0a\": container with ID starting with b0888e6149fdad72f64dd4014a85c7a0671b8ab523fe26093ee53088dce76b0a not found: ID does not exist" containerID="b0888e6149fdad72f64dd4014a85c7a0671b8ab523fe26093ee53088dce76b0a" Jan 30 00:24:18 crc 
kubenswrapper[4885]: I0130 00:24:18.560388 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0888e6149fdad72f64dd4014a85c7a0671b8ab523fe26093ee53088dce76b0a"} err="failed to get container status \"b0888e6149fdad72f64dd4014a85c7a0671b8ab523fe26093ee53088dce76b0a\": rpc error: code = NotFound desc = could not find container \"b0888e6149fdad72f64dd4014a85c7a0671b8ab523fe26093ee53088dce76b0a\": container with ID starting with b0888e6149fdad72f64dd4014a85c7a0671b8ab523fe26093ee53088dce76b0a not found: ID does not exist" Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.573143 4885 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3da20ee2-edea-4b96-b4e2-b838abca2581-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.826949 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-t8p5t"] Jan 30 00:24:18 crc kubenswrapper[4885]: I0130 00:24:18.834095 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-t8p5t"] Jan 30 00:24:20 crc kubenswrapper[4885]: I0130 00:24:20.147458 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3da20ee2-edea-4b96-b4e2-b838abca2581" path="/var/lib/kubelet/pods/3da20ee2-edea-4b96-b4e2-b838abca2581/volumes" Jan 30 00:24:20 crc kubenswrapper[4885]: I0130 00:24:20.871252 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2wtdr" Jan 30 00:24:20 crc kubenswrapper[4885]: I0130 00:24:20.871460 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2wtdr" Jan 30 00:24:20 crc kubenswrapper[4885]: I0130 00:24:20.914061 4885 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2wtdr" Jan 30 00:24:21 crc kubenswrapper[4885]: I0130 00:24:21.546655 4885 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2wtdr" Jan 30 00:24:22 crc kubenswrapper[4885]: I0130 00:24:22.130548 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2wtdr"] Jan 30 00:24:23 crc kubenswrapper[4885]: I0130 00:24:23.515272 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2wtdr" podUID="969ff0a8-3372-4250-befc-6f0f6d4232ac" containerName="registry-server" containerID="cri-o://d4aa724f7ced62bc9a67caa15345ba5bc472b425f87477c108a400a4be018b23" gracePeriod=2 Jan 30 00:24:23 crc kubenswrapper[4885]: I0130 00:24:23.899249 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2wtdr" Jan 30 00:24:23 crc kubenswrapper[4885]: I0130 00:24:23.934283 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/969ff0a8-3372-4250-befc-6f0f6d4232ac-catalog-content\") pod \"969ff0a8-3372-4250-befc-6f0f6d4232ac\" (UID: \"969ff0a8-3372-4250-befc-6f0f6d4232ac\") " Jan 30 00:24:23 crc kubenswrapper[4885]: I0130 00:24:23.934361 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/969ff0a8-3372-4250-befc-6f0f6d4232ac-utilities\") pod \"969ff0a8-3372-4250-befc-6f0f6d4232ac\" (UID: \"969ff0a8-3372-4250-befc-6f0f6d4232ac\") " Jan 30 00:24:23 crc kubenswrapper[4885]: I0130 00:24:23.934407 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7qxf\" (UniqueName: \"kubernetes.io/projected/969ff0a8-3372-4250-befc-6f0f6d4232ac-kube-api-access-z7qxf\") pod \"969ff0a8-3372-4250-befc-6f0f6d4232ac\" (UID: \"969ff0a8-3372-4250-befc-6f0f6d4232ac\") " Jan 30 00:24:23 crc kubenswrapper[4885]: I0130 00:24:23.935648 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/969ff0a8-3372-4250-befc-6f0f6d4232ac-utilities" (OuterVolumeSpecName: "utilities") pod "969ff0a8-3372-4250-befc-6f0f6d4232ac" (UID: "969ff0a8-3372-4250-befc-6f0f6d4232ac"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:24:23 crc kubenswrapper[4885]: I0130 00:24:23.945081 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/969ff0a8-3372-4250-befc-6f0f6d4232ac-kube-api-access-z7qxf" (OuterVolumeSpecName: "kube-api-access-z7qxf") pod "969ff0a8-3372-4250-befc-6f0f6d4232ac" (UID: "969ff0a8-3372-4250-befc-6f0f6d4232ac"). InnerVolumeSpecName "kube-api-access-z7qxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:24:23 crc kubenswrapper[4885]: I0130 00:24:23.986434 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/969ff0a8-3372-4250-befc-6f0f6d4232ac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "969ff0a8-3372-4250-befc-6f0f6d4232ac" (UID: "969ff0a8-3372-4250-befc-6f0f6d4232ac"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:24:24 crc kubenswrapper[4885]: I0130 00:24:24.035737 4885 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/969ff0a8-3372-4250-befc-6f0f6d4232ac-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 30 00:24:24 crc kubenswrapper[4885]: I0130 00:24:24.036816 4885 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/969ff0a8-3372-4250-befc-6f0f6d4232ac-utilities\") on node \"crc\" DevicePath \"\"" Jan 30 00:24:24 crc kubenswrapper[4885]: I0130 00:24:24.036841 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7qxf\" (UniqueName: \"kubernetes.io/projected/969ff0a8-3372-4250-befc-6f0f6d4232ac-kube-api-access-z7qxf\") on node \"crc\" DevicePath \"\"" Jan 30 00:24:24 crc kubenswrapper[4885]: I0130 00:24:24.522722 4885 generic.go:334] "Generic (PLEG): container finished" podID="969ff0a8-3372-4250-befc-6f0f6d4232ac" containerID="d4aa724f7ced62bc9a67caa15345ba5bc472b425f87477c108a400a4be018b23" exitCode=0 Jan 30 00:24:24 crc kubenswrapper[4885]: I0130 00:24:24.522789 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2wtdr" event={"ID":"969ff0a8-3372-4250-befc-6f0f6d4232ac","Type":"ContainerDied","Data":"d4aa724f7ced62bc9a67caa15345ba5bc472b425f87477c108a400a4be018b23"} Jan 30 00:24:24 crc kubenswrapper[4885]: I0130 00:24:24.522822 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2wtdr" event={"ID":"969ff0a8-3372-4250-befc-6f0f6d4232ac","Type":"ContainerDied","Data":"51633575c29b3b05e874753bbc0f0039f5ec2c165af752ca066c759ea2d70e52"} Jan 30 00:24:24 crc kubenswrapper[4885]: I0130 00:24:24.522844 4885 scope.go:117] "RemoveContainer" containerID="d4aa724f7ced62bc9a67caa15345ba5bc472b425f87477c108a400a4be018b23" Jan 30 00:24:24 crc kubenswrapper[4885]: I0130 00:24:24.522992 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2wtdr" Jan 30 00:24:24 crc kubenswrapper[4885]: I0130 00:24:24.542103 4885 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2wtdr"] Jan 30 00:24:24 crc kubenswrapper[4885]: I0130 00:24:24.545579 4885 scope.go:117] "RemoveContainer" containerID="a1d4c17175cee557d35b92fd78a71569749fb5bb5bd18326ddf5982da7192af5" Jan 30 00:24:24 crc kubenswrapper[4885]: I0130 00:24:24.547300 4885 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2wtdr"] Jan 30 00:24:24 crc kubenswrapper[4885]: I0130 00:24:24.564083 4885 scope.go:117] "RemoveContainer" containerID="d38511737a2e4acd729469f75935a60c7e51c27e391d3ef83b186a901d6f7902" Jan 30 00:24:24 crc kubenswrapper[4885]: I0130 00:24:24.581594 4885 scope.go:117] "RemoveContainer" containerID="d4aa724f7ced62bc9a67caa15345ba5bc472b425f87477c108a400a4be018b23" Jan 30 00:24:24 crc kubenswrapper[4885]: E0130 00:24:24.582207 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4aa724f7ced62bc9a67caa15345ba5bc472b425f87477c108a400a4be018b23\": container with ID starting with d4aa724f7ced62bc9a67caa15345ba5bc472b425f87477c108a400a4be018b23 not found: ID does not exist" containerID="d4aa724f7ced62bc9a67caa15345ba5bc472b425f87477c108a400a4be018b23" Jan 30 00:24:24 crc kubenswrapper[4885]: I0130 00:24:24.582239 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4aa724f7ced62bc9a67caa15345ba5bc472b425f87477c108a400a4be018b23"} err="failed to get container status \"d4aa724f7ced62bc9a67caa15345ba5bc472b425f87477c108a400a4be018b23\": rpc error: code = NotFound desc = could not find container \"d4aa724f7ced62bc9a67caa15345ba5bc472b425f87477c108a400a4be018b23\": container with ID starting with d4aa724f7ced62bc9a67caa15345ba5bc472b425f87477c108a400a4be018b23 not found: ID does not exist" Jan 30 00:24:24 crc kubenswrapper[4885]: I0130 00:24:24.582260 4885 scope.go:117] "RemoveContainer" containerID="a1d4c17175cee557d35b92fd78a71569749fb5bb5bd18326ddf5982da7192af5" Jan 30 00:24:24 crc kubenswrapper[4885]: E0130 00:24:24.583923 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1d4c17175cee557d35b92fd78a71569749fb5bb5bd18326ddf5982da7192af5\": container with ID starting with a1d4c17175cee557d35b92fd78a71569749fb5bb5bd18326ddf5982da7192af5 not found: ID does not exist" containerID="a1d4c17175cee557d35b92fd78a71569749fb5bb5bd18326ddf5982da7192af5" Jan 30 00:24:24 crc kubenswrapper[4885]: I0130 00:24:24.583950 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1d4c17175cee557d35b92fd78a71569749fb5bb5bd18326ddf5982da7192af5"} err="failed to get container status \"a1d4c17175cee557d35b92fd78a71569749fb5bb5bd18326ddf5982da7192af5\": rpc error: code = NotFound desc = could not find container \"a1d4c17175cee557d35b92fd78a71569749fb5bb5bd18326ddf5982da7192af5\": container with ID starting with a1d4c17175cee557d35b92fd78a71569749fb5bb5bd18326ddf5982da7192af5 not found: ID does not exist" Jan 30 00:24:24 crc kubenswrapper[4885]: I0130 00:24:24.583963 4885 scope.go:117] "RemoveContainer" containerID="d38511737a2e4acd729469f75935a60c7e51c27e391d3ef83b186a901d6f7902" Jan 30 00:24:24 crc kubenswrapper[4885]: E0130 00:24:24.584267 4885 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d38511737a2e4acd729469f75935a60c7e51c27e391d3ef83b186a901d6f7902\": container with ID starting with d38511737a2e4acd729469f75935a60c7e51c27e391d3ef83b186a901d6f7902 not found: ID does not exist" containerID="d38511737a2e4acd729469f75935a60c7e51c27e391d3ef83b186a901d6f7902" Jan 30 00:24:24 crc kubenswrapper[4885]: I0130 00:24:24.584290 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d38511737a2e4acd729469f75935a60c7e51c27e391d3ef83b186a901d6f7902"} err="failed to get container status \"d38511737a2e4acd729469f75935a60c7e51c27e391d3ef83b186a901d6f7902\": rpc error: code = NotFound desc = could not find container \"d38511737a2e4acd729469f75935a60c7e51c27e391d3ef83b186a901d6f7902\": container with ID starting with d38511737a2e4acd729469f75935a60c7e51c27e391d3ef83b186a901d6f7902 not found: ID does not exist" Jan 30 00:24:25 crc kubenswrapper[4885]: E0130 00:24:25.143981 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" Jan 30 00:24:26 crc kubenswrapper[4885]: I0130 00:24:26.150085 4885 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="969ff0a8-3372-4250-befc-6f0f6d4232ac" path="/var/lib/kubelet/pods/969ff0a8-3372-4250-befc-6f0f6d4232ac/volumes" Jan 30 00:24:30 crc kubenswrapper[4885]: I0130 00:24:30.144563 4885 patch_prober.go:28] interesting pod/machine-config-daemon-bmd5j container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 00:24:30 crc kubenswrapper[4885]: I0130 00:24:30.145161 4885 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 00:24:30 crc kubenswrapper[4885]: I0130 00:24:30.150249 4885 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" Jan 30 00:24:30 crc kubenswrapper[4885]: I0130 00:24:30.151018 4885 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f5eaf40abf3a6366dfcb4c81b4d066fc28142bbf2650eea588add5b1620b36e0"} pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 00:24:30 crc kubenswrapper[4885]: I0130 00:24:30.151181 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" containerID="cri-o://f5eaf40abf3a6366dfcb4c81b4d066fc28142bbf2650eea588add5b1620b36e0" gracePeriod=600 Jan 30 00:24:30 crc kubenswrapper[4885]: I0130 00:24:30.566554 4885 generic.go:334] "Generic (PLEG): 
container finished" podID="41b99e9c-eadb-404c-9596-1b102ac85157" containerID="f5eaf40abf3a6366dfcb4c81b4d066fc28142bbf2650eea588add5b1620b36e0" exitCode=0 Jan 30 00:24:30 crc kubenswrapper[4885]: I0130 00:24:30.566639 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" event={"ID":"41b99e9c-eadb-404c-9596-1b102ac85157","Type":"ContainerDied","Data":"f5eaf40abf3a6366dfcb4c81b4d066fc28142bbf2650eea588add5b1620b36e0"} Jan 30 00:24:30 crc kubenswrapper[4885]: I0130 00:24:30.566947 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" event={"ID":"41b99e9c-eadb-404c-9596-1b102ac85157","Type":"ContainerStarted","Data":"4384a3c053fe3615330ef842a3bb66169bda9b69d63640211f912ddc7de10e03"} Jan 30 00:24:30 crc kubenswrapper[4885]: I0130 00:24:30.566979 4885 scope.go:117] "RemoveContainer" containerID="734caa87bbc7a31bd529920dc9d2ea498fed57fb22424523a2081de46284edd7" Jan 30 00:24:33 crc kubenswrapper[4885]: I0130 00:24:33.623689 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-6fm5q_a6dfcb67-43fe-46d9-9349-c581afa2d82f/control-plane-machine-set-operator/0.log" Jan 30 00:24:33 crc kubenswrapper[4885]: I0130 00:24:33.749102 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-d4zrj_51bce3b6-6a4d-45ea-89a7-bf5cf50d7610/kube-rbac-proxy/0.log" Jan 30 00:24:33 crc kubenswrapper[4885]: I0130 00:24:33.801084 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-d4zrj_51bce3b6-6a4d-45ea-89a7-bf5cf50d7610/machine-api-operator/0.log" Jan 30 00:24:39 crc kubenswrapper[4885]: E0130 00:24:39.144264 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" Jan 30 00:24:46 crc kubenswrapper[4885]: I0130 00:24:46.536113 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-545d4d4674-nsmps_baf670af-e33a-499c-af65-5d9df86ea0af/cert-manager-controller/0.log" Jan 30 00:24:46 crc kubenswrapper[4885]: I0130 00:24:46.648318 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-5545bd876-ns9n7_a0519bfa-8fa8-4c8a-98f7-2ab2a17b7923/cert-manager-cainjector/0.log" Jan 30 00:24:46 crc kubenswrapper[4885]: I0130 00:24:46.719527 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-6888856db4-n7q7k_50db7dbd-00e1-425a-ada5-f771c61e95b2/cert-manager-webhook/0.log" Jan 30 00:24:53 crc kubenswrapper[4885]: E0130 00:24:53.143922 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" Jan 30 00:25:00 crc kubenswrapper[4885]: I0130 00:25:00.069715 4885 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-j4sbv_242d3a0e-d0ae-4a31-b367-73fae8817ff4/prometheus-operator/0.log" Jan 30 00:25:00 crc kubenswrapper[4885]: I0130 00:25:00.217619 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-568c6c9c94-pgxkz_5767ad85-13c2-45e6-9b9b-b029aa23d546/prometheus-operator-admission-webhook/0.log" Jan 30 00:25:00 crc kubenswrapper[4885]: I0130 00:25:00.248552 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-568c6c9c94-sdbc5_26ab510c-a00c-491d-a540-30faf4147e68/prometheus-operator-admission-webhook/0.log" Jan 30 00:25:00 crc kubenswrapper[4885]: I0130 00:25:00.380371 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-mns7f_23d3c1dd-756d-43b9-a6b5-337a09e3be8f/operator/0.log" Jan 30 00:25:00 crc kubenswrapper[4885]: I0130 00:25:00.427836 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-pw6h7_17d1d23a-713e-4a3f-94c9-6de7c19c9bd2/perses-operator/0.log" Jan 30 00:25:06 crc kubenswrapper[4885]: E0130 00:25:06.144242 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" Jan 30 00:25:14 crc kubenswrapper[4885]: I0130 00:25:14.474287 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq_75b8ecd5-1943-4af8-82ad-cbe8578ad0cc/util/0.log" Jan 30 00:25:14 crc kubenswrapper[4885]: I0130 00:25:14.609442 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq_75b8ecd5-1943-4af8-82ad-cbe8578ad0cc/util/0.log" Jan 30 00:25:14 crc kubenswrapper[4885]: I0130 00:25:14.781380 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq_75b8ecd5-1943-4af8-82ad-cbe8578ad0cc/util/0.log" Jan 30 00:25:14 crc kubenswrapper[4885]: I0130 00:25:14.941202 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r_b1069962-45cc-4659-a3c8-66f6f9a0de10/util/0.log" Jan 30 00:25:15 crc kubenswrapper[4885]: I0130 00:25:15.077340 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r_b1069962-45cc-4659-a3c8-66f6f9a0de10/util/0.log" Jan 30 00:25:15 crc kubenswrapper[4885]: I0130 00:25:15.113110 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r_b1069962-45cc-4659-a3c8-66f6f9a0de10/pull/0.log" Jan 30 00:25:15 crc kubenswrapper[4885]: I0130 00:25:15.145153 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r_b1069962-45cc-4659-a3c8-66f6f9a0de10/pull/0.log" Jan 30 00:25:15 crc 
kubenswrapper[4885]: I0130 00:25:15.239071 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r_b1069962-45cc-4659-a3c8-66f6f9a0de10/util/0.log" Jan 30 00:25:15 crc kubenswrapper[4885]: I0130 00:25:15.262029 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r_b1069962-45cc-4659-a3c8-66f6f9a0de10/pull/0.log" Jan 30 00:25:15 crc kubenswrapper[4885]: I0130 00:25:15.298980 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_925ad1f05bf386dc21bdfe2f8249c1fbfd04a404dec7a7fb6362d758e5sqx8r_b1069962-45cc-4659-a3c8-66f6f9a0de10/extract/0.log" Jan 30 00:25:15 crc kubenswrapper[4885]: I0130 00:25:15.442163 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj_cac769f2-b794-4b61-95e1-045926cee254/util/0.log" Jan 30 00:25:15 crc kubenswrapper[4885]: I0130 00:25:15.561891 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj_cac769f2-b794-4b61-95e1-045926cee254/util/0.log" Jan 30 00:25:15 crc kubenswrapper[4885]: I0130 00:25:15.581757 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj_cac769f2-b794-4b61-95e1-045926cee254/pull/0.log" Jan 30 00:25:15 crc kubenswrapper[4885]: I0130 00:25:15.595159 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj_cac769f2-b794-4b61-95e1-045926cee254/pull/0.log" Jan 30 00:25:15 crc kubenswrapper[4885]: I0130 00:25:15.697229 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj_cac769f2-b794-4b61-95e1-045926cee254/util/0.log" Jan 30 00:25:15 crc kubenswrapper[4885]: I0130 00:25:15.720254 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj_cac769f2-b794-4b61-95e1-045926cee254/pull/0.log" Jan 30 00:25:15 crc kubenswrapper[4885]: I0130 00:25:15.747286 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08j4bzj_cac769f2-b794-4b61-95e1-045926cee254/extract/0.log" Jan 30 00:25:15 crc kubenswrapper[4885]: I0130 00:25:15.874515 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-qj7dd_f8a63fc0-9c8d-4318-a69c-5d6463f40f0d/extract-utilities/0.log" Jan 30 00:25:16 crc kubenswrapper[4885]: I0130 00:25:16.039851 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-qj7dd_f8a63fc0-9c8d-4318-a69c-5d6463f40f0d/extract-utilities/0.log" Jan 30 00:25:16 crc kubenswrapper[4885]: I0130 00:25:16.040224 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-qj7dd_f8a63fc0-9c8d-4318-a69c-5d6463f40f0d/extract-content/0.log" Jan 30 00:25:16 crc kubenswrapper[4885]: I0130 00:25:16.042802 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-qj7dd_f8a63fc0-9c8d-4318-a69c-5d6463f40f0d/extract-content/0.log" Jan 
30 00:25:16 crc kubenswrapper[4885]: I0130 00:25:16.188141 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-qj7dd_f8a63fc0-9c8d-4318-a69c-5d6463f40f0d/extract-utilities/0.log" Jan 30 00:25:16 crc kubenswrapper[4885]: I0130 00:25:16.265402 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-qj7dd_f8a63fc0-9c8d-4318-a69c-5d6463f40f0d/extract-content/0.log" Jan 30 00:25:16 crc kubenswrapper[4885]: I0130 00:25:16.384732 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-pbj45_bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4/extract-utilities/0.log" Jan 30 00:25:16 crc kubenswrapper[4885]: I0130 00:25:16.416924 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-qj7dd_f8a63fc0-9c8d-4318-a69c-5d6463f40f0d/registry-server/0.log" Jan 30 00:25:16 crc kubenswrapper[4885]: I0130 00:25:16.510692 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-pbj45_bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4/extract-utilities/0.log" Jan 30 00:25:16 crc kubenswrapper[4885]: I0130 00:25:16.512230 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-pbj45_bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4/extract-content/0.log" Jan 30 00:25:16 crc kubenswrapper[4885]: I0130 00:25:16.565756 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-pbj45_bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4/extract-content/0.log" Jan 30 00:25:16 crc kubenswrapper[4885]: I0130 00:25:16.727815 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-pbj45_bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4/extract-content/0.log" Jan 30 00:25:16 crc kubenswrapper[4885]: I0130 00:25:16.801505 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-pbj45_bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4/extract-utilities/0.log" Jan 30 00:25:16 crc kubenswrapper[4885]: I0130 00:25:16.879182 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-pbj45_bc174ab6-d3bd-49a5-a9e5-e49c051dd9a4/registry-server/0.log" Jan 30 00:25:16 crc kubenswrapper[4885]: I0130 00:25:16.911863 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-8628w_1fcfed07-3bc7-4be4-8f00-fb268c4b7821/marketplace-operator/0.log" Jan 30 00:25:16 crc kubenswrapper[4885]: I0130 00:25:16.993315 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9vzzw_d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145/extract-utilities/0.log" Jan 30 00:25:17 crc kubenswrapper[4885]: I0130 00:25:17.136889 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9vzzw_d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145/extract-content/0.log" Jan 30 00:25:17 crc kubenswrapper[4885]: E0130 00:25:17.144080 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" Jan 30 
00:25:17 crc kubenswrapper[4885]: I0130 00:25:17.163458 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9vzzw_d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145/extract-utilities/0.log" Jan 30 00:25:17 crc kubenswrapper[4885]: I0130 00:25:17.217326 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9vzzw_d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145/extract-content/0.log" Jan 30 00:25:17 crc kubenswrapper[4885]: I0130 00:25:17.381485 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9vzzw_d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145/extract-utilities/0.log" Jan 30 00:25:17 crc kubenswrapper[4885]: I0130 00:25:17.382201 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9vzzw_d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145/extract-content/0.log" Jan 30 00:25:17 crc kubenswrapper[4885]: I0130 00:25:17.491323 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9vzzw_d6b6b28d-5a6b-4ce2-b5e9-3834e7b0f145/registry-server/0.log" Jan 30 00:25:29 crc kubenswrapper[4885]: E0130 00:25:29.143094 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" Jan 30 00:25:29 crc kubenswrapper[4885]: I0130 00:25:29.777824 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-j4sbv_242d3a0e-d0ae-4a31-b367-73fae8817ff4/prometheus-operator/0.log" Jan 30 00:25:29 crc kubenswrapper[4885]: I0130 00:25:29.812759 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-568c6c9c94-pgxkz_5767ad85-13c2-45e6-9b9b-b029aa23d546/prometheus-operator-admission-webhook/0.log" Jan 30 00:25:29 crc kubenswrapper[4885]: I0130 00:25:29.865083 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-568c6c9c94-sdbc5_26ab510c-a00c-491d-a540-30faf4147e68/prometheus-operator-admission-webhook/0.log" Jan 30 00:25:29 crc kubenswrapper[4885]: I0130 00:25:29.942156 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-mns7f_23d3c1dd-756d-43b9-a6b5-337a09e3be8f/operator/0.log" Jan 30 00:25:29 crc kubenswrapper[4885]: I0130 00:25:29.976101 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-pw6h7_17d1d23a-713e-4a3f-94c9-6de7c19c9bd2/perses-operator/0.log" Jan 30 00:25:42 crc kubenswrapper[4885]: E0130 00:25:42.144746 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" Jan 30 00:25:55 crc kubenswrapper[4885]: E0130 00:25:55.144416 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" Jan 30 00:26:09 crc kubenswrapper[4885]: E0130 00:26:09.152186 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" Jan 30 00:26:20 crc kubenswrapper[4885]: I0130 00:26:20.145437 4885 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 30 00:26:20 crc kubenswrapper[4885]: E0130 00:26:20.386342 4885 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = initializing source docker://registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb: pinging container registry registry.connect.redhat.com: Get \"https://registry.connect.redhat.com/v2/\": dial tcp: lookup registry.connect.redhat.com on 199.204.47.54:53: server misbehaving" image="registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb" Jan 30 00:26:20 crc kubenswrapper[4885]: E0130 00:26:20.386567 4885 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:pull,Image:registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb,Command:[/util/cpb /bundle],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:bundle,ReadOnly:false,MountPath:/bundle,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:util,ReadOnly:false,MountPath:/util,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zhnnm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq_openshift-marketplace(75b8ecd5-1943-4af8-82ad-cbe8578ad0cc): ErrImagePull: initializing source docker://registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb: pinging container registry 
Jan 30 00:26:20 crc kubenswrapper[4885]: E0130 00:26:20.386567 4885 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:pull,Image:registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb,Command:[/util/cpb /bundle],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:bundle,ReadOnly:false,MountPath:/bundle,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:util,ReadOnly:false,MountPath:/util,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zhnnm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq_openshift-marketplace(75b8ecd5-1943-4af8-82ad-cbe8578ad0cc): ErrImagePull: initializing source docker://registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb: pinging container registry registry.connect.redhat.com: Get \"https://registry.connect.redhat.com/v2/\": dial tcp: lookup registry.connect.redhat.com on 199.204.47.54:53: server misbehaving" logger="UnhandledError"
Jan 30 00:26:20 crc kubenswrapper[4885]: E0130 00:26:20.387932 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ErrImagePull: \"initializing source docker://registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb: pinging container registry registry.connect.redhat.com: Get \\\"https://registry.connect.redhat.com/v2/\\\": dial tcp: lookup registry.connect.redhat.com on 199.204.47.54:53: server misbehaving\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc"
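The &Container{...} blob in the UnhandledError entry above is the kubelet's Go-syntax dump of the failing init container spec. The sketch below rewrites it with k8s.io/api types purely for readability; every field value is copied from the dump, and this is a reconstruction, not the manifest stored in the cluster:

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/api/resource"
)

// buildPullInitContainer rebuilds the "pull" init container from the
// &Container{...} dump above. Fields the dump leaves nil or empty are
// omitted here.
func buildPullInitContainer() corev1.Container {
	runAsUser := int64(1000170000)
	runAsNonRoot := true
	allowPrivilegeEscalation := false
	return corev1.Container{
		Name:    "pull",
		Image:   "registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb",
		Command: []string{"/util/cpb", "/bundle"},
		Resources: corev1.ResourceRequirements{
			Requests: corev1.ResourceList{
				corev1.ResourceCPU:    resource.MustParse("10m"),  // {{10 -3}} in the dump
				corev1.ResourceMemory: resource.MustParse("50Mi"), // {{52428800 0}} in the dump
			},
		},
		VolumeMounts: []corev1.VolumeMount{
			{Name: "bundle", MountPath: "/bundle"},
			{Name: "util", MountPath: "/util"},
			{Name: "kube-api-access-zhnnm", ReadOnly: true, MountPath: "/var/run/secrets/kubernetes.io/serviceaccount"},
		},
		TerminationMessagePath:   "/dev/termination-log",
		TerminationMessagePolicy: corev1.TerminationMessageFallbackToLogsOnError,
		ImagePullPolicy:          corev1.PullIfNotPresent,
		SecurityContext: &corev1.SecurityContext{
			Capabilities:             &corev1.Capabilities{Drop: []corev1.Capability{"ALL"}},
			RunAsUser:                &runAsUser,
			RunAsNonRoot:             &runAsNonRoot,
			AllowPrivilegeEscalation: &allowPrivilegeEscalation,
		},
	}
}

func main() {
	c := buildPullInitContainer()
	fmt.Println(c.Name, c.Image)
}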
Need to start a new one" pod="openshift-must-gather-mtzwm/must-gather-ptg4v" Jan 30 00:26:29 crc kubenswrapper[4885]: I0130 00:26:29.223453 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-trfhx\" (UniqueName: \"kubernetes.io/projected/e0b124f1-8033-47b8-a363-1f25643c84b0-kube-api-access-trfhx\") pod \"e0b124f1-8033-47b8-a363-1f25643c84b0\" (UID: \"e0b124f1-8033-47b8-a363-1f25643c84b0\") " Jan 30 00:26:29 crc kubenswrapper[4885]: I0130 00:26:29.223523 4885 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e0b124f1-8033-47b8-a363-1f25643c84b0-must-gather-output\") pod \"e0b124f1-8033-47b8-a363-1f25643c84b0\" (UID: \"e0b124f1-8033-47b8-a363-1f25643c84b0\") " Jan 30 00:26:29 crc kubenswrapper[4885]: I0130 00:26:29.231694 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0b124f1-8033-47b8-a363-1f25643c84b0-kube-api-access-trfhx" (OuterVolumeSpecName: "kube-api-access-trfhx") pod "e0b124f1-8033-47b8-a363-1f25643c84b0" (UID: "e0b124f1-8033-47b8-a363-1f25643c84b0"). InnerVolumeSpecName "kube-api-access-trfhx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 30 00:26:29 crc kubenswrapper[4885]: I0130 00:26:29.272998 4885 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0b124f1-8033-47b8-a363-1f25643c84b0-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "e0b124f1-8033-47b8-a363-1f25643c84b0" (UID: "e0b124f1-8033-47b8-a363-1f25643c84b0"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 30 00:26:29 crc kubenswrapper[4885]: I0130 00:26:29.324686 4885 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e0b124f1-8033-47b8-a363-1f25643c84b0-must-gather-output\") on node \"crc\" DevicePath \"\"" Jan 30 00:26:29 crc kubenswrapper[4885]: I0130 00:26:29.325212 4885 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-trfhx\" (UniqueName: \"kubernetes.io/projected/e0b124f1-8033-47b8-a363-1f25643c84b0-kube-api-access-trfhx\") on node \"crc\" DevicePath \"\"" Jan 30 00:26:29 crc kubenswrapper[4885]: I0130 00:26:29.329113 4885 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-mtzwm_must-gather-ptg4v_e0b124f1-8033-47b8-a363-1f25643c84b0/copy/0.log" Jan 30 00:26:29 crc kubenswrapper[4885]: I0130 00:26:29.329706 4885 generic.go:334] "Generic (PLEG): container finished" podID="e0b124f1-8033-47b8-a363-1f25643c84b0" containerID="3bad1eddcf209152a47c92c0872c7d545a17654e4713ac735981d9f224baed1c" exitCode=143 Jan 30 00:26:29 crc kubenswrapper[4885]: I0130 00:26:29.329761 4885 scope.go:117] "RemoveContainer" containerID="3bad1eddcf209152a47c92c0872c7d545a17654e4713ac735981d9f224baed1c" Jan 30 00:26:29 crc kubenswrapper[4885]: I0130 00:26:29.329815 4885 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mtzwm/must-gather-ptg4v" Jan 30 00:26:29 crc kubenswrapper[4885]: I0130 00:26:29.357317 4885 scope.go:117] "RemoveContainer" containerID="19e29e2c184541a6e6dd1768d5673404081d2c512e33c9bd636856287aa46e00" Jan 30 00:26:29 crc kubenswrapper[4885]: I0130 00:26:29.409903 4885 scope.go:117] "RemoveContainer" containerID="3bad1eddcf209152a47c92c0872c7d545a17654e4713ac735981d9f224baed1c" Jan 30 00:26:29 crc kubenswrapper[4885]: E0130 00:26:29.410484 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3bad1eddcf209152a47c92c0872c7d545a17654e4713ac735981d9f224baed1c\": container with ID starting with 3bad1eddcf209152a47c92c0872c7d545a17654e4713ac735981d9f224baed1c not found: ID does not exist" containerID="3bad1eddcf209152a47c92c0872c7d545a17654e4713ac735981d9f224baed1c" Jan 30 00:26:29 crc kubenswrapper[4885]: I0130 00:26:29.410561 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bad1eddcf209152a47c92c0872c7d545a17654e4713ac735981d9f224baed1c"} err="failed to get container status \"3bad1eddcf209152a47c92c0872c7d545a17654e4713ac735981d9f224baed1c\": rpc error: code = NotFound desc = could not find container \"3bad1eddcf209152a47c92c0872c7d545a17654e4713ac735981d9f224baed1c\": container with ID starting with 3bad1eddcf209152a47c92c0872c7d545a17654e4713ac735981d9f224baed1c not found: ID does not exist" Jan 30 00:26:29 crc kubenswrapper[4885]: I0130 00:26:29.410593 4885 scope.go:117] "RemoveContainer" containerID="19e29e2c184541a6e6dd1768d5673404081d2c512e33c9bd636856287aa46e00" Jan 30 00:26:29 crc kubenswrapper[4885]: E0130 00:26:29.410963 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19e29e2c184541a6e6dd1768d5673404081d2c512e33c9bd636856287aa46e00\": container with ID starting with 19e29e2c184541a6e6dd1768d5673404081d2c512e33c9bd636856287aa46e00 not found: ID does not exist" containerID="19e29e2c184541a6e6dd1768d5673404081d2c512e33c9bd636856287aa46e00" Jan 30 00:26:29 crc kubenswrapper[4885]: I0130 00:26:29.411013 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19e29e2c184541a6e6dd1768d5673404081d2c512e33c9bd636856287aa46e00"} err="failed to get container status \"19e29e2c184541a6e6dd1768d5673404081d2c512e33c9bd636856287aa46e00\": rpc error: code = NotFound desc = could not find container \"19e29e2c184541a6e6dd1768d5673404081d2c512e33c9bd636856287aa46e00\": container with ID starting with 19e29e2c184541a6e6dd1768d5673404081d2c512e33c9bd636856287aa46e00 not found: ID does not exist" Jan 30 00:26:30 crc kubenswrapper[4885]: I0130 00:26:30.144030 4885 patch_prober.go:28] interesting pod/machine-config-daemon-bmd5j container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 00:26:30 crc kubenswrapper[4885]: I0130 00:26:30.144300 4885 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 00:26:30 crc kubenswrapper[4885]: I0130 00:26:30.149060 4885 
Jan 30 00:26:29 crc kubenswrapper[4885]: I0130 00:26:29.357317 4885 scope.go:117] "RemoveContainer" containerID="19e29e2c184541a6e6dd1768d5673404081d2c512e33c9bd636856287aa46e00"
Jan 30 00:26:29 crc kubenswrapper[4885]: I0130 00:26:29.409903 4885 scope.go:117] "RemoveContainer" containerID="3bad1eddcf209152a47c92c0872c7d545a17654e4713ac735981d9f224baed1c"
Jan 30 00:26:29 crc kubenswrapper[4885]: E0130 00:26:29.410484 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3bad1eddcf209152a47c92c0872c7d545a17654e4713ac735981d9f224baed1c\": container with ID starting with 3bad1eddcf209152a47c92c0872c7d545a17654e4713ac735981d9f224baed1c not found: ID does not exist" containerID="3bad1eddcf209152a47c92c0872c7d545a17654e4713ac735981d9f224baed1c"
Jan 30 00:26:29 crc kubenswrapper[4885]: I0130 00:26:29.410561 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bad1eddcf209152a47c92c0872c7d545a17654e4713ac735981d9f224baed1c"} err="failed to get container status \"3bad1eddcf209152a47c92c0872c7d545a17654e4713ac735981d9f224baed1c\": rpc error: code = NotFound desc = could not find container \"3bad1eddcf209152a47c92c0872c7d545a17654e4713ac735981d9f224baed1c\": container with ID starting with 3bad1eddcf209152a47c92c0872c7d545a17654e4713ac735981d9f224baed1c not found: ID does not exist"
Jan 30 00:26:29 crc kubenswrapper[4885]: I0130 00:26:29.410593 4885 scope.go:117] "RemoveContainer" containerID="19e29e2c184541a6e6dd1768d5673404081d2c512e33c9bd636856287aa46e00"
Jan 30 00:26:29 crc kubenswrapper[4885]: E0130 00:26:29.410963 4885 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19e29e2c184541a6e6dd1768d5673404081d2c512e33c9bd636856287aa46e00\": container with ID starting with 19e29e2c184541a6e6dd1768d5673404081d2c512e33c9bd636856287aa46e00 not found: ID does not exist" containerID="19e29e2c184541a6e6dd1768d5673404081d2c512e33c9bd636856287aa46e00"
Jan 30 00:26:29 crc kubenswrapper[4885]: I0130 00:26:29.411013 4885 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19e29e2c184541a6e6dd1768d5673404081d2c512e33c9bd636856287aa46e00"} err="failed to get container status \"19e29e2c184541a6e6dd1768d5673404081d2c512e33c9bd636856287aa46e00\": rpc error: code = NotFound desc = could not find container \"19e29e2c184541a6e6dd1768d5673404081d2c512e33c9bd636856287aa46e00\": container with ID starting with 19e29e2c184541a6e6dd1768d5673404081d2c512e33c9bd636856287aa46e00 not found: ID does not exist"
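The paired "ContainerStatus from runtime service failed" / "DeleteContainer returned error" entries above are benign: both container IDs were removed a few entries earlier, so the follow-up CRI lookups return NotFound. A hedged Go sketch of that idempotent-delete pattern; removeIgnoringNotFound is a hypothetical helper for illustration, not the kubelet's actual code:

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeIgnoringNotFound runs a CRI-style remove and treats a NotFound
// status as success, since the container was already deleted earlier.
func removeIgnoringNotFound(remove func(id string) error, id string) error {
	if err := remove(id); err != nil && status.Code(err) != codes.NotFound {
		return err
	}
	return nil
}

func main() {
	// Simulate the runtime answering like the log above: the container
	// was removed moments earlier, so lookups say NotFound.
	fake := func(id string) error {
		return status.Error(codes.NotFound, "could not find container "+id)
	}
	fmt.Println(removeIgnoringNotFound(fake, "3bad1edd")) // <nil>: treated as done
}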
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 30 00:27:30 crc kubenswrapper[4885]: I0130 00:27:30.144743 4885 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 30 00:27:30 crc kubenswrapper[4885]: I0130 00:27:30.152137 4885 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" Jan 30 00:27:30 crc kubenswrapper[4885]: I0130 00:27:30.153364 4885 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4384a3c053fe3615330ef842a3bb66169bda9b69d63640211f912ddc7de10e03"} pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 30 00:27:30 crc kubenswrapper[4885]: I0130 00:27:30.153467 4885 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" podUID="41b99e9c-eadb-404c-9596-1b102ac85157" containerName="machine-config-daemon" containerID="cri-o://4384a3c053fe3615330ef842a3bb66169bda9b69d63640211f912ddc7de10e03" gracePeriod=600 Jan 30 00:27:30 crc kubenswrapper[4885]: I0130 00:27:30.755611 4885 generic.go:334] "Generic (PLEG): container finished" podID="41b99e9c-eadb-404c-9596-1b102ac85157" containerID="4384a3c053fe3615330ef842a3bb66169bda9b69d63640211f912ddc7de10e03" exitCode=0 Jan 30 00:27:30 crc kubenswrapper[4885]: I0130 00:27:30.755670 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" event={"ID":"41b99e9c-eadb-404c-9596-1b102ac85157","Type":"ContainerDied","Data":"4384a3c053fe3615330ef842a3bb66169bda9b69d63640211f912ddc7de10e03"} Jan 30 00:27:30 crc kubenswrapper[4885]: I0130 00:27:30.756002 4885 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bmd5j" event={"ID":"41b99e9c-eadb-404c-9596-1b102ac85157","Type":"ContainerStarted","Data":"9e8f90ad533b83435f0722bcdd1190775e4e413c2c5727d8ada9a03b225429e0"} Jan 30 00:27:30 crc kubenswrapper[4885]: I0130 00:27:30.756020 4885 scope.go:117] "RemoveContainer" containerID="f5eaf40abf3a6366dfcb4c81b4d066fc28142bbf2650eea588add5b1620b36e0" Jan 30 00:27:34 crc kubenswrapper[4885]: E0130 00:27:34.149721 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" Jan 30 00:27:47 crc kubenswrapper[4885]: E0130 00:27:47.143511 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" 
podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" Jan 30 00:27:58 crc kubenswrapper[4885]: E0130 00:27:58.145823 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" Jan 30 00:28:09 crc kubenswrapper[4885]: E0130 00:28:09.144111 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" Jan 30 00:28:20 crc kubenswrapper[4885]: E0130 00:28:20.145735 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" Jan 30 00:28:33 crc kubenswrapper[4885]: E0130 00:28:33.146338 4885 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/eck@sha256:815e6949d8b96d832660e6ed715f8fbf080b230f1bccfc3e0f38781585b14eeb\\\"\"" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e2t8gq" podUID="75b8ecd5-1943-4af8-82ad-cbe8578ad0cc" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515136775503024461 0ustar coreroot  Om77'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015136775504017377 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015136772645016525 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015136772646015476 5ustar corecore